Example #1
    def __init__(self, repo: Repo, release_commit: str, release_type: str):
        self.repo = repo
        self._release_commit = ""
        self.release_commit = release_commit
        self.release_type = release_type
        self._git = git
        self._version = get_version_from_repo(git=self._git)
        self._release_branch = ""
        self._rollback_stack = []  # type: List[str]
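
The constructor above seeds a private _release_commit backing field and then assigns through self.release_commit, which implies release_commit is a property whose setter validates the value before storing it. A minimal sketch of that pattern, with an assumed validation rule (not taken from the source):

class Release:
    def __init__(self, repo, release_commit: str):
        self.repo = repo
        self._release_commit = ""
        self.release_commit = release_commit  # routed through the setter below

    @property
    def release_commit(self) -> str:
        return self._release_commit

    @release_commit.setter
    def release_commit(self, commit: str) -> None:
        # Assumed check: a release commit should be a full 40-character sha
        if len(commit) != 40:
            raise ValueError(f"commit {commit!r} does not look like a full sha")
        self._release_commit = commit
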
Example #2
def main():
    logging.basicConfig(level=logging.INFO)

    build_check_name = sys.argv[1]
    build_name = sys.argv[2]

    build_config = get_build_config(build_check_name, build_name)

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    pr_info = PRInfo()

    logging.info("Repo copy path %s", REPO_COPY)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    version = get_version_from_repo(REPO_COPY)
    release_or_pr = get_release_or_pr(pr_info, build_config, version)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))

    # If this is a rerun, then we try to find already created artifacts and just
    # put them as the GitHub Actions artifact (result)
    build_results = get_build_results_if_exists(s3_helper, s3_path_prefix)
    if build_results is not None and len(build_results) > 0:
        logging.info("Some build results found %s", build_results)
        build_urls = []
        log_url = ""
        for url in build_results:
            if "build_log.log" in url:
                log_url = "https://s3.amazonaws.com/clickhouse-builds/" + url.replace(
                    "+", "%2B").replace(" ", "%20")
            else:
                build_urls.append(
                    "https://s3.amazonaws.com/clickhouse-builds/" +
                    url.replace("+", "%2B").replace(" ", "%20"))
        create_json_artifact(
            TEMP_PATH,
            build_name,
            log_url,
            build_urls,
            build_config,
            0,
            len(build_urls) > 0,
        )
        return

    image_name = get_image_name(build_config)
    docker_image = get_image_with_version(IMAGES_PATH, image_name)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.get_version_string())

    version_type = "testing"
    if "release" in pr_info.labels or "release-lts" in pr_info.labels:
        version_type = "stable"

    update_version_local(REPO_COPY, pr_info.sha, version, version_type)

    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(TEMP_PATH, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")

    logging.info("Will try to fetch cache for our build")
    get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    if build_config["package_type"] == "performance" and pr_info.number != 0:
        # because perf tests store some information about git commits
        subprocess.check_call(
            f"cd {REPO_COPY} && git fetch origin master:master", shell=True)

    packager_cmd = get_packager_cmd(
        build_config,
        os.path.join(REPO_COPY, "docker/packager"),
        build_output_path,
        version.get_version_string(),
        image_version,
        ccache_path,
        pr_info,
    )
    logging.info("Going to run packager with %s", packager_cmd)

    build_clickhouse_log = os.path.join(TEMP_PATH, "build_log")
    if not os.path.exists(build_clickhouse_log):
        os.makedirs(build_clickhouse_log)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, build_clickhouse_log,
                                         build_output_path)
    elapsed = int(time.time() - start)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}",
                          shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}",
                          shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        logging.info("Build log doesn't exist")

    build_urls = s3_helper.upload_build_folder_to_s3(
        build_output_path,
        s3_path_prefix,
        keep_dirs_in_s3_path=False,
        upload_symlinks=False,
    )
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format("\n".join(build_urls)))

    print("::notice ::Log URL: {}".format(log_url))

    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, elapsed, success)

    upload_master_static_binaries(pr_info, build_config, s3_helper,
                                  build_output_path)
    # Fail the build job if it did not succeed
    if not success:
        sys.exit(1)
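
Example #2 (and Example #3 below) shard S3 paths by release_or_pr. The helper itself is not shown here, but Examples #6 and #10 inline the same decision, so here is a hedged sketch consistent with them (the exact signature varies between the examples):

def get_release_or_pr(pr_info, version) -> str:
    # Master builds (PR number 0) are filed under "<major>.<minor>",
    # everything else under the PR number, mirroring Examples #6 and #10.
    if pr_info.number == 0:
        return f"{version.major}.{version.minor}"
    return str(pr_info.number)
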
Example #3
def main():
    logging.basicConfig(level=logging.INFO)

    build_name = sys.argv[1]

    build_config = CI_CONFIG["build_config"][build_name]

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    pr_info = PRInfo()

    logging.info("Repo copy path %s", REPO_COPY)

    s3_helper = S3Helper()

    version = get_version_from_repo(git=Git(True))
    release_or_pr, performance_pr = get_release_or_pr(pr_info, version)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))
    # FIXME performance
    s3_performance_path = "/".join(
        (performance_pr, pr_info.sha, build_name, "performance.tgz"))

    # If this is a rerun, then we try to find already created artifacts and just
    # put them as the GitHub Actions artifact (result)
    check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config)

    docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.string)

    official_flag = pr_info.number == 0
    if "official" in build_config:
        official_flag = build_config["official"]

    version_type = "testing"
    if "release" in pr_info.labels or "release-lts" in pr_info.labels:
        version_type = "stable"
        official_flag = True

    update_version_local(version, version_type)

    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(TEMP_PATH, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")

    logging.info("Will try to fetch cache for our build")
    try:
        get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number,
                                 TEMP_PATH)
    except Exception as e:
        # In case there are issues with ccache, remove the path and do not fail the build
        logging.info("Failed to get ccache, building without it. Error: %s", e)
        rmtree(ccache_path, ignore_errors=True)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    packager_cmd = get_packager_cmd(
        build_config,
        os.path.join(REPO_COPY, "docker/packager"),
        build_output_path,
        version.string,
        image_version,
        ccache_path,
        official_flag,
    )

    logging.info("Going to run packager with %s", packager_cmd)

    logs_path = os.path.join(TEMP_PATH, "build_log")
    if not os.path.exists(logs_path):
        os.makedirs(logs_path)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, logs_path,
                                         build_output_path)
    elapsed = int(time.time() - start)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}",
                          shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}",
                          shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    # Upload the ccache first so that the least build time is lost in case of problems
    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    # FIXME performance
    performance_urls = []
    performance_path = os.path.join(build_output_path, "performance.tgz")
    if os.path.exists(performance_path):
        performance_urls.append(
            s3_helper.upload_build_file_to_s3(performance_path,
                                              s3_performance_path))
        logging.info(
            "Uploaded performance.tgz to %s, now delete to avoid duplication",
            performance_urls[0],
        )
        os.remove(performance_path)

    build_urls = (s3_helper.upload_build_folder_to_s3(
        build_output_path,
        s3_path_prefix,
        keep_dirs_in_s3_path=False,
        upload_symlinks=False,
    ) + performance_urls)
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format("\n".join(build_urls)))

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        logging.info("Build log doesn't exist")

    print(f"::notice ::Log URL: {log_url}")

    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, elapsed, success)

    upload_master_static_binaries(pr_info, build_config, s3_helper,
                                  build_output_path)
    # Fail the build job if it did not succeed
    if not success:
        sys.exit(1)
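
check_for_success_run takes over the inline rerun shortcut from Example #2. A hedged sketch of that logic factored into a helper; the early exit via sys.exit(0) is an assumption, since the helper returns nothing to main:

import sys

def check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config):
    # Look for artifacts left by a previous run of this exact build
    build_results = get_build_results_if_exists(s3_helper, s3_path_prefix)
    if not build_results:
        return  # nothing found, fall through to a normal build
    log_url = ""
    build_urls = []
    for url in build_results:
        full_url = "https://s3.amazonaws.com/clickhouse-builds/" + url.replace(
            "+", "%2B").replace(" ", "%20")
        if "build_log.log" in url:
            log_url = full_url
        else:
            build_urls.append(full_url)
    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, 0, len(build_urls) > 0)
    sys.exit(0)  # assumed: stop the job instead of rebuilding
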
Example #4
    def read_version(self):
        self._git.update()
        self.version = get_version_from_repo(git=self._git)
Example #5
def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="A program to build clickhouse-server image, both alpine and "
        "ubuntu versions",
    )

    parser.add_argument(
        "--version",
        type=version_arg,
        default=get_version_from_repo(git=git).string,
        help="a version to build, automaticaly got from version_helper, accepts either "
        "tag ('refs/tags/' is removed automatically) or a normal 22.2.2.2 format",
    )
    parser.add_argument(
        "--release-type",
        type=str,
        choices=("auto", "latest", "major", "minor", "patch", "head"),
        default="head",
        help="version part that will be updated when '--version' is set; "
        "'auto' is a special case, it will get versions from github and detect the "
        "release type (latest, major, minor or patch) automatically",
    )
    parser.add_argument(
        "--image-path",
        type=str,
        default="docker/server",
        help="a path to docker context directory",
    )
    parser.add_argument(
        "--image-repo",
        type=str,
        default="clickhouse/clickhouse-server",
        help="image name on docker hub",
    )
    parser.add_argument(
        "--bucket-prefix",
        help="if set, then is used as source for deb and tgz files",
    )
    parser.add_argument("--reports", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-reports",
        action="store_false",
        dest="reports",
        default=argparse.SUPPRESS,
        help="don't push reports to S3 and github",
    )
    parser.add_argument("--push", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-push-images",
        action="store_false",
        dest="push",
        default=argparse.SUPPRESS,
        help="don't push images to docker hub",
    )
    parser.add_argument("--os", default=["ubuntu", "alpine"], help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-ubuntu",
        action=DelOS,
        nargs=0,
        default=argparse.SUPPRESS,
        help="don't build ubuntu image",
    )
    parser.add_argument(
        "--no-alpine",
        action=DelOS,
        nargs=0,
        default=argparse.SUPPRESS,
        help="don't build alpine image",
    )

    return parser.parse_args()
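
--no-ubuntu and --no-alpine are wired to a custom DelOS action with nargs=0, which prunes the matching entry from the --os default list; version_arg likewise strips a leading 'refs/tags/' before parsing, per its help text. A plausible sketch of such an action (inferred from the flag names, not taken from the source):

import argparse

class DelOS(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        # "--no-ubuntu" parses into dest "no_ubuntu"; strip the "no_" prefix
        # to recover the OS name and drop it from the accumulated list.
        image_os = self.dest[len("no_"):]
        if image_os in namespace.os:
            namespace.os.remove(image_os)
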
Example #6
        os.makedirs(TEMP_PATH)

    result_path = os.path.join(TEMP_PATH, "result_path")
    if not os.path.exists(result_path):
        os.makedirs(result_path)

    instances = prepare_autoscaling_group_and_get_hostnames()
    nodes_path = save_nodes_to_file(instances, TEMP_PATH)

    # always use latest
    docker_image = IMAGE_NAME

    build_name = get_build_name_for_check(CHECK_NAME)

    if pr_info.number == 0:
        version = get_version_from_repo()
        release_or_pr = f"{version.major}.{version.minor}"
    else:
        # PR number for anything else
        release_or_pr = str(pr_info.number)

    # This check runs separately from other checks because it requires an
    # exclusive run (see .github/workflows/jepsen.yml), so we cannot add an
    # explicit dependency on a build job and instead busy-wait on its results.
    # For the same reason we use the latest docker image.
    build_url = f"https://s3.amazonaws.com/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
    head = requests.head(build_url)
    counter = 0
    while head.status_code != 200:
        time.sleep(10)
        head = requests.head(build_url)
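
Note that counter is initialized but never incremented in the shown lines, so this excerpt as written can poll forever if the binary never appears. A hedged sketch of a bounded variant (the attempt limit is an assumption):

import time
import requests

def wait_for_build(build_url: str, max_attempts: int = 180) -> None:
    # Poll until the published binary answers 200, or give up after
    # max_attempts * 10 seconds (the cap itself is assumed).
    for _ in range(max_attempts):
        if requests.head(build_url).status_code == 200:
            return
        time.sleep(10)
    raise TimeoutError(f"no build at {build_url} after {max_attempts} attempts")
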
Example #7
    def update(self):
        self._git.update()
        self.version = get_version_from_repo()
Example #8
def main():
    logging.basicConfig(level=logging.INFO)
    args = parse_args()
    release = Release(get_version_from_repo())

    release.do(args)
Example #9
        try:
            logging.info(f"Pulling image {image_name}:{image_version}")
            subprocess.check_output(
                f"docker pull {image_name}:{image_version}",
                stderr=subprocess.STDOUT,
                shell=True)
            break
        except Exception as ex:
            time.sleep(i * 3)
            logging.info("Got execption pulling docker %s", ex)
    else:
        raise Exception(
            f"Cannot pull image {image_name}:{image_version} from dockerhub"
        )

    version = get_version_from_repo(repo_path)
    version.tweak_update()
    update_version_local(repo_path, pr_info.sha, version)

    build_name = build_config_to_string(build_config)
    logging.info(f"Build short name {build_name}")
    subprocess.check_call(
        f"echo 'BUILD_NAME=build_urls_{build_name}' >> $GITHUB_ENV",
        shell=True)

    build_output_path = os.path.join(temp_path, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(caches_path, build_name + '_ccache')
    if not os.path.exists(ccache_path):
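
The else: at the top of this excerpt belongs to a for loop whose header was cut off (time.sleep(i * 3) references the loop variable i), i.e. the classic for/else retry pattern. A hedged reconstruction (the attempt count is an assumption):

import logging
import subprocess
import time

def pull_image(image_name: str, image_version: str, attempts: int = 10) -> None:
    for i in range(attempts):  # the number of attempts is assumed
        try:
            logging.info("Pulling image %s:%s", image_name, image_version)
            subprocess.check_output(
                f"docker pull {image_name}:{image_version}",
                stderr=subprocess.STDOUT,
                shell=True)
            break  # success: skip the for/else raise below
        except Exception as ex:
            time.sleep(i * 3)  # linear backoff between attempts
            logging.info("Got exception pulling docker %s", ex)
    else:
        raise Exception(
            f"Cannot pull image {image_name}:{image_version} from dockerhub")
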
Example #10
        os.makedirs(TEMP_PATH)

    result_path = os.path.join(TEMP_PATH, "result_path")
    if not os.path.exists(result_path):
        os.makedirs(result_path)

    instances = prepare_autoscaling_group_and_get_hostnames()
    nodes_path = save_nodes_to_file(instances, TEMP_PATH)

    # always use latest
    docker_image = IMAGE_NAME

    build_name = get_build_name_for_check(CHECK_NAME)

    if pr_info.number == 0:
        version = get_version_from_repo(REPO_COPY)
        release_or_pr = ".".join(version.as_tuple()[:2])
    else:
        # PR number for anything else
        release_or_pr = str(pr_info.number)

    # This check runs separately from other checks because it requires an
    # exclusive run (see .github/workflows/jepsen.yml), so we cannot add an
    # explicit dependency on a build job and instead busy-wait on its results.
    # For the same reason we use the latest docker image.
    build_url = f"https://s3.amazonaws.com/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
    head = requests.head(build_url)
    counter = 0
    while head.status_code != 200:
        time.sleep(10)
        head = requests.head(build_url)