Example 1
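# CI build job: reads the build name from argv, fetches a ccache from S3,
# runs the Docker-based packager, and uploads the resulting packages,
# performance archive, and build log back to S3.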
def main():
    logging.basicConfig(level=logging.INFO)

    build_name = sys.argv[1]

    build_config = CI_CONFIG["build_config"][build_name]

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    pr_info = PRInfo()

    logging.info("Repo copy path %s", REPO_COPY)

    s3_helper = S3Helper()

    version = get_version_from_repo(git=Git(True))
    release_or_pr, performance_pr = get_release_or_pr(pr_info, version)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))
    # FIXME performance
    s3_performance_path = "/".join(
        (performance_pr, pr_info.sha, build_name, "performance.tgz"))

    # If this is a rerun, try to find already created artifacts and just
    # publish them as the GitHub Actions artifact (result)
    check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config)

    docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.string)

    official_flag = pr_info.number == 0
    if "official" in build_config:
        official_flag = build_config["official"]

    version_type = "testing"
    if "release" in pr_info.labels or "release-lts" in pr_info.labels:
        version_type = "stable"
        official_flag = True

    update_version_local(version, version_type)

    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(TEMP_PATH, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")

    logging.info("Will try to fetch cache for our build")
    try:
        get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number,
                                 TEMP_PATH)
    except Exception as e:
        # If fetching the ccache fails, remove the path and do not fail the build
        logging.info("Failed to get ccache, building without it. Error: %s", e)
        rmtree(ccache_path, ignore_errors=True)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    packager_cmd = get_packager_cmd(
        build_config,
        os.path.join(REPO_COPY, "docker/packager"),
        build_output_path,
        version.string,
        image_version,
        ccache_path,
        official_flag,
    )

    logging.info("Going to run packager with %s", packager_cmd)

    logs_path = os.path.join(TEMP_PATH, "build_log")
    if not os.path.exists(logs_path):
        os.makedirs(logs_path)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, logs_path,
                                         build_output_path)
    elapsed = int(time.time() - start)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}",
                          shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}",
                          shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    # Upload the ccache first so the next build loses the least time if a later step fails
    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    # FIXME performance
    performance_urls = []
    performance_path = os.path.join(build_output_path, "performance.tgz")
    if os.path.exists(performance_path):
        performance_urls.append(
            s3_helper.upload_build_file_to_s3(performance_path,
                                              s3_performance_path))
        logging.info(
            "Uploaded performance.tgz to %s, now delete to avoid duplication",
            performance_urls[0],
        )
        os.remove(performance_path)

    build_urls = (s3_helper.upload_build_folder_to_s3(
        build_output_path,
        s3_path_prefix,
        keep_dirs_in_s3_path=False,
        upload_symlinks=False,
    ) + performance_urls)
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format("\n".join(build_urls)))

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        # Keep log_url defined so the notice and JSON artifact below do not fail
        log_url = ""
        logging.info("Build log doesn't exist")

    print(f"::notice ::Log URL: {log_url}")

    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, elapsed, success)

    upload_master_static_binaries(pr_info, build_config, s3_helper,
                                  build_output_path)
    # Fail the build job if the build did not succeed
    if not success:
        sys.exit(1)
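
Example 1 delegates its rerun shortcut to a check_for_success_run helper that is not shown in the excerpt. A minimal sketch of what such a helper might look like, inferred from the inline rerun handling in the next example and reusing the same constants and helpers as the excerpts (the early sys.exit and the exact bucket URL are assumptions carried over from Example 2):

def check_for_success_run(s3_helper, s3_prefix, build_name, build_config):
    # Hypothetical sketch, mirroring the inline logic in Example 2.
    build_results = get_build_results_if_exists(s3_helper, s3_prefix)
    if not build_results:
        return  # nothing uploaded yet, fall through to a normal build

    logging.info("Some build results found %s", build_results)
    build_urls = []
    log_url = ""
    for url in build_results:
        escaped = url.replace("+", "%2B").replace(" ", "%20")
        if "build_log.log" in url:
            log_url = "https://s3.amazonaws.com/clickhouse-builds/" + escaped
        else:
            build_urls.append("https://s3.amazonaws.com/clickhouse-builds/" + escaped)

    create_json_artifact(
        TEMP_PATH, build_name, log_url, build_urls, build_config, 0, len(build_urls) > 0
    )
    # Assumption: exit here so the rest of main() is skipped on a rerun.
    sys.exit(0)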
Example 2
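# Apparently an earlier variant of the same build job: the rerun shortcut is
# inlined instead of factored into check_for_success_run, and the version and
# packager helpers take slightly different arguments.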
def main():
    logging.basicConfig(level=logging.INFO)

    build_check_name = sys.argv[1]
    build_name = sys.argv[2]

    build_config = get_build_config(build_check_name, build_name)

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    pr_info = PRInfo()

    logging.info("Repo copy path %s", REPO_COPY)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    version = get_version_from_repo(REPO_COPY)
    release_or_pr = get_release_or_pr(pr_info, build_config, version)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))

    # If this is a rerun, try to find already created artifacts and just
    # publish them as the GitHub Actions artifact (result)
    build_results = get_build_results_if_exists(s3_helper, s3_path_prefix)
    if build_results is not None and len(build_results) > 0:
        logging.info("Some build results found %s", build_results)
        build_urls = []
        log_url = ""
        for url in build_results:
            if "build_log.log" in url:
                log_url = "https://s3.amazonaws.com/clickhouse-builds/" + url.replace(
                    "+", "%2B").replace(" ", "%20")
            else:
                build_urls.append(
                    "https://s3.amazonaws.com/clickhouse-builds/" +
                    url.replace("+", "%2B").replace(" ", "%20"))
        create_json_artifact(
            TEMP_PATH,
            build_name,
            log_url,
            build_urls,
            build_config,
            0,
            len(build_urls) > 0,
        )
        return

    image_name = get_image_name(build_config)
    docker_image = get_image_with_version(IMAGES_PATH, image_name)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.get_version_string())

    version_type = "testing"
    if "release" in pr_info.labels or "release-lts" in pr_info.labels:
        version_type = "stable"

    update_version_local(REPO_COPY, pr_info.sha, version, version_type)

    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(TEMP_PATH, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")

    logging.info("Will try to fetch cache for our build")
    get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    if build_config["package_type"] == "performance" and pr_info.number != 0:
        # because perf tests store some information about git commits
        subprocess.check_call(
            f"cd {REPO_COPY} && git fetch origin master:master", shell=True)

    packager_cmd = get_packager_cmd(
        build_config,
        os.path.join(REPO_COPY, "docker/packager"),
        build_output_path,
        version.get_version_string(),
        image_version,
        ccache_path,
        pr_info,
    )
    logging.info("Going to run packager with %s", packager_cmd)

    build_clickhouse_log = os.path.join(TEMP_PATH, "build_log")
    if not os.path.exists(build_clickhouse_log):
        os.makedirs(build_clickhouse_log)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, build_clickhouse_log,
                                         build_output_path)
    elapsed = int(time.time() - start)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}",
                          shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}",
                          shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        # Keep log_url defined so the notice and JSON artifact below do not fail
        log_url = ""
        logging.info("Build log doesn't exist")

    build_urls = s3_helper.upload_build_folder_to_s3(
        build_output_path,
        s3_path_prefix,
        keep_dirs_in_s3_path=False,
        upload_symlinks=False,
    )
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format("\n".join(build_urls)))

    print("::notice ::Log URL: {}".format(log_url))

    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, elapsed, success)

    upload_master_static_binaries(pr_info, build_config, s3_helper,
                                  build_output_path)
    # Fail the build job if the build did not succeed
    if not success:
        sys.exit(1)
Example 3
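    # Excerpt from a fast-test check (starts mid-function; temp_path, s3_helper,
    # and pr_info are defined earlier in the original script): prepares the
    # workspace, output, and ccache directories, then builds the fast-test command.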
    workspace = os.path.join(temp_path, "fasttest-workspace")
    if not os.path.exists(workspace):
        os.makedirs(workspace)

    output_path = os.path.join(temp_path, "fasttest-output")
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    if not os.path.exists(CACHES_PATH):
        os.makedirs(CACHES_PATH)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {CACHES_PATH}", shell=True)
    cache_path = os.path.join(CACHES_PATH, "fasttest")

    logging.info("Will try to fetch cache for our build")
    ccache_for_pr = get_ccache_if_not_exists(
        cache_path, s3_helper, pr_info.number, temp_path
    )
    upload_master_ccache = ccache_for_pr in (-1, 0)
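    # Note (assumption): get_ccache_if_not_exists appears to return the PR number
    # whose ccache was downloaded; -1 (no cache found) or 0 (master's cache) means
    # the cache produced by this run should also be uploaded for master.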

    if not os.path.exists(cache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(cache_path)

    repo_path = os.path.join(temp_path, "fasttest-repo")
    if not os.path.exists(repo_path):
        os.makedirs(repo_path)

    run_cmd = get_fasttest_cmd(
        workspace,
        output_path,
        cache_path,