Example #1
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
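        # "--password-stdin" together with the input= keyword keeps the robot
        # password off the command line, so it never shows up in the process list.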
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, GITHUB_WORKSPACE,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([im.path for im in changed_images]))

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        images_processing_result += process_image_with_parents(
            image, image_versions, push)
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

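    # GitHub caps commit status descriptions at 140 characters; truncate to be safe.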
    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example #2
    logging.info("Going to run command %s", run_command)
    with subprocess.Popen(run_command, shell=True) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}",
                          shell=True)
    print("Result path", os.listdir(result_path))
    print("Server log path", os.listdir(server_log_path))

    state, description, test_results, additional_logs = process_result(
        result_path, server_log_path)

    ch_helper = ClickHouseHelper()
    s3_helper = S3Helper('https://s3.amazonaws.com')
    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha,
                                test_results, additional_logs, CHECK_NAME)
    print(f"::notice ::Report url: {report_url}")
    post_commit_status(gh, pr_info.sha, CHECK_NAME, description, state,
                       report_url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info, test_results, state, stopwatch.duration_seconds,
        stopwatch.start_time_str, report_url, CHECK_NAME)
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example #3
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

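    # Collect the per-architecture results (presumably the
    # changed_images_{suffix}.json files written by the per-arch build jobs).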
    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

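    # "docker manifest" is gated behind the experimental CLI flag in older
    # Docker releases, so enable it explicitly for this process.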
    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, push)
            test_results.append((manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(os.path.join(args.path, "changed_images.json"), "w") as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example #4
    run_log_path = os.path.join(temp_path, "runlog.log")

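    # TeePopen is a project-local wrapper around subprocess that, as the name
    # suggests, tees the command's output into run_log_path while it runs.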
    with TeePopen(run_command, run_log_path) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}",
                          shell=True)

    report_path = os.path.join(result_path, "html_report")
    logging.info("Report path %s", report_path)
    s3_path_prefix = "codebrowser"
    html_urls = s3_helper.fast_parallel_upload_dir(report_path, s3_path_prefix,
                                                   'clickhouse-test-reports')

    index_html = '<a href="https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/index.html">HTML report</a>'

    test_results = [(index_html, "Look at the report")]

    report_url = upload_results(s3_helper, 0, os.getenv("GITHUB_SHA"),
                                test_results, [], NAME)

    print(f"::notice ::Report url: {report_url}")

    post_commit_status(gh, os.getenv("GITHUB_SHA"), NAME, "Report built",
                       "success", report_url)
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    makedirs(TEMP_PATH, exist_ok=True)

    args = parse_args()
    image = DockerImage(args.image_path, args.image_repo, False)
    args.release_type = auto_release_type(args.version, args.release_type)
    tags = gen_tags(args.version, args.release_type)
    NAME = f"Docker image {image.repo} building check"
    pr_info = None
    if CI:
        pr_info = PRInfo()
        release_or_pr, _ = get_release_or_pr(pr_info, args.version)
        args.bucket_prefix = (
            f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}"
        )

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )
        NAME = f"Docker image {image.repo} build and push"

    logging.info("Following tags will be created: %s", ", ".join(tags))
    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
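    # NB: the loop variable "os" is a target OS name from the CLI, not the os
    # module; this file apparently imports makedirs() directly (note the bare
    # call above), so the shadowing is harmless here.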
    for os in args.os:
        for tag in tags:
            test_results.extend(
                build_and_push_image(
                    image, args.push, args.bucket_prefix, os, tag, args.version
                )
            )
            if test_results[-1][1] != "OK":
                status = "failure"

    pr_info = pr_info or PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    description = f"Processed tags: {', '.join(tags)}"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)
    if status != "success":
        sys.exit(1)
Example #6
    s3_helper = S3Helper()
    state, description, test_results, additional_logs = process_result(test_output)

    ch_helper = ClickHouseHelper()
    mark_flaky_tests(ch_helper, check_name, test_results)

    report_url = upload_results(
        s3_helper,
        pr_info.number,
        pr_info.sha,
        test_results,
        [run_log_path] + additional_logs,
        check_name,
    )
    print(f"::notice ::Report url: {report_url}")
    post_commit_status(gh, pr_info.sha, check_name, description, state, report_url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        state,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        report_url,
        check_name,
    )

    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)

    if state == "error":
        sys.exit(1)
Example #7
        traceback.print_exc()
        status = "failure"
        message = "Failed to parse the report."

    if not status:
        status = "failure"
        message = "No status in report."
    elif not message:
        status = "failure"
        message = "No message in report."

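    # Each later assignment overrides the previous one, so the most detailed
    # artifact wins: report.html > output.7z > compare.log > runlog.log,
    # falling back to the bare run URL.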
    report_url = GITHUB_RUN_URL

    if paths["runlog.log"]:
        report_url = paths["runlog.log"]

    if paths["compare.log"]:
        report_url = paths["compare.log"]

    if paths["output.7z"]:
        report_url = paths["output.7z"]

    if paths["report.html"]:
        report_url = paths["report.html"]

    post_commit_status(gh, pr_info.sha, check_name_with_group, message, status,
                       report_url)

    if status == "error":
        sys.exit(1)
Example #8
    run_log_path = os.path.join(temp_path, "runlog.log")

    with TeePopen(run_command, run_log_path) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}",
                          shell=True)

    report_path = os.path.join(result_path, "html_report")
    logging.info("Report path %s", report_path)
    s3_path_prefix = "codebrowser"
    html_urls = s3_helper.fast_parallel_upload_dir(report_path, s3_path_prefix,
                                                   'clickhouse-test-reports')

    index_html = '<a href="https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/index.html">HTML report</a>'

    test_results = [(index_html, "Look at the report")]

    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha,
                                test_results, [], NAME)

    print(f"::notice ::Report url: {report_url}")

    post_commit_status(gh, pr_info.sha, NAME, "Report built", "success",
                       report_url)
Example #9
        # PR number for anything else
        release_or_pr = str(pr_info.number)

    # This check runs separately from other checks because it requires an
    # exclusive run (see .github/workflows/jepsen.yml), so we cannot add an
    # explicit dependency on a build job and instead busy-loop on its results.
    # For the same reason we use the latest docker image.
    build_url = f"https://s3.amazonaws.com/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
    head = requests.head(build_url)
    counter = 0
    while head.status_code != 200:
        time.sleep(10)
        head = requests.head(build_url)
        counter += 1
        if counter >= 180:
            post_commit_status(gh, pr_info.sha, CHECK_NAME,
                               "Cannot fetch build to run", "error", "")
            raise Exception("Cannot fetch build")

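    # SSHKey is a project-local context manager that presumably loads the key
    # into an ssh-agent for the duration of the block; SSH_AUTH_SOCK is then
    # forwarded into the Jepsen runner.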
    with SSHKey(key_value=get_parameter_from_ssm("jepsen_ssh_key") + '\n'):
        ssh_auth_sock = os.environ['SSH_AUTH_SOCK']
        auth_sock_dir = os.path.dirname(ssh_auth_sock)
        cmd = get_run_command(ssh_auth_sock, auth_sock_dir, pr_info,
                              nodes_path, REPO_COPY, build_url, result_path,
                              docker_image)
        logging.info("Going to run jepsen: %s", cmd)

        run_log_path = os.path.join(TEMP_PATH, "runlog.log")

        with TeePopen(cmd, run_log_path) as process:
            retcode = process.wait()
            if retcode == 0:
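
The fragment above is truncated in the source listing. Its core idea, polling S3 every 10 seconds (up to 180 attempts, i.e. 30 minutes) until the freshly built binary appears, can be sketched standalone; wait_for_url and the URL below are illustrative, not part of the original module:

import time

import requests

def wait_for_url(url: str, interval: float = 10.0, attempts: int = 180) -> None:
    """Poll url with HEAD requests until it returns 200 or attempts run out."""
    for _ in range(attempts):
        if requests.head(url, timeout=30).status_code == 200:
            return
        time.sleep(interval)
    raise TimeoutError(f"{url} did not appear after {attempts} attempts")

# Hypothetical artifact URL, for illustration only:
# wait_for_url("https://s3.amazonaws.com/clickhouse-builds/0/<sha>/<build>/clickhouse")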
Example #10
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    images_dict = get_images_dict(GITHUB_WORKSPACE, "docker/images.json")

    pr_info = PRInfo()
    if args.all:
        pr_info.changed_files = set(images_dict.keys())
    elif args.image_path:
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        try:
            pr_info.fetch_changed_files()
        except TypeError:
            # If the event does not contain a diff, nothing will be built
            pass

    changed_images = get_changed_docker_images(pr_info, images_dict)
    if changed_images:
        logging.info("Has changed images: %s",
                     ", ".join([im.path for im in changed_images]))

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        # If we are in a backport PR, pr_info.release_pr is defined.
        # We use it as a tag to reduce rebuilding time.
        images_processing_result += process_image_with_parents(
            image, image_versions, pr_info.release_pr, args.push)
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper()

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default",
                                 table="checks",
                                 events=prepared_events)

    if status == "error":
        sys.exit(1)
Example #11
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    repo_path = GITHUB_WORKSPACE

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, repo_path,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([str(image[0]) for image in changed_images]))
    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
    # The order is important: the PR number is used as a cache during the build
    versions = [str(pr_info.number), pr_commit_version]
    result_version = pr_commit_version
    if pr_info.number == 0:
        # First get the latest for cache
        versions.insert(0, "latest")

    if args.suffix:
        # We should build architecture-specific images separately and merge
        # a manifest later in a separate script
        versions = [f"{v}-{args.suffix}" for v in versions]
        # changed_images_{suffix}.json should contain all changed images
        result_version = versions

    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
        images_processing_result += process_single_image(
            versions, full_path, image_name, push)
        result_images[image_name] = result_version

    if changed_images:
        description = "Updated " + ",".join([im[1] for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
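
Every example above reports its verdict through the project-local post_commit_status helper. As a rough sketch of what such a helper can look like, built on PyGithub and assuming the repository name comes from the standard GITHUB_REPOSITORY environment variable (the real helper may differ):

import os

from github import Github

def post_commit_status(gh: Github, sha: str, check_name: str,
                       description: str, state: str, report_url: str) -> None:
    # Attach a status in the "check_name" context to the given commit.
    repo = gh.get_repo(os.environ["GITHUB_REPOSITORY"])
    commit = repo.get_commit(sha)
    commit.create_status(
        context=check_name,
        description=description,
        state=state,
        target_url=report_url,
    )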