Example No. 1
    logging.info("Going to run command %s", run_command)
    with subprocess.Popen(run_command, shell=True) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")

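    # Files produced inside the Docker container are owned by root; hand them back to the CI user.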
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}",
                          shell=True)
    print("Result path", os.listdir(result_path))
    print("Server log path", os.listdir(server_log_path))

    state, description, test_results, additional_logs = process_result(
        result_path, server_log_path)

    ch_helper = ClickHouseHelper()
    s3_helper = S3Helper('https://s3.amazonaws.com')
    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha,
                                test_results, additional_logs, CHECK_NAME)
    print(f"::notice ::Report url: {report_url}")
    post_commit_status(gh, pr_info.sha, CHECK_NAME, description, state,
                       report_url)

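    # Convert the check results into row events for the CI database in ClickHouse.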
    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info, test_results, state, stopwatch.duration_seconds,
        stopwatch.start_time_str, report_url, CHECK_NAME)
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example No. 2
    run_log_path = os.path.join(temp_path, "runlog.log")

    with TeePopen(run_command, run_log_path) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}",
                          shell=True)

    report_path = os.path.join(result_path, "html_report")
    logging.info("Report path %s", report_path)
    s3_path_prefix = "codebrowser"
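    # Upload the whole generated report tree to the test-reports bucket in parallel.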
    html_urls = s3_helper.fast_parallel_upload_dir(report_path, s3_path_prefix,
                                                   'clickhouse-test-reports')

    index_html = '<a href="https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/index.html">HTML report</a>'

    test_results = [(index_html, "Look at the report")]

    report_url = upload_results(s3_helper, 0, os.getenv("GITHUB_SHA"),
                                test_results, [], NAME)

    print(f"::notice ::Report url: {report_url}")

    post_commit_status(gh, os.getenv("GITHUB_SHA"), NAME, "Report built",
                       "success", report_url)
Example No. 3
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
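    # The Docker Hub robot password is fetched from AWS SSM and fed via stdin
    # so it never appears in the process list.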
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, GITHUB_WORKSPACE,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([im.path for im in changed_images]))

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        images_processing_result += process_image_with_parents(
            image, image_versions, push)
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

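    # GitHub limits commit-status descriptions to 140 characters.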
    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example No. 4
    for f in test_output_files:
        additional_logs.append(os.path.join(output_path, f))

    test_log_exists = ('test_log.txt' in test_output_files
                       or 'test_result.txt' in test_output_files)
    test_result_exists = 'test_results.tsv' in test_output_files
    test_results = []
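    # The logs are written in stage order; the first missing one shows which
    # preceding stage failed before producing it.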
    if 'submodule_log.txt' not in test_output_files:
        description = "Cannot clone repository"
        state = "failure"
    elif 'cmake_log.txt' not in test_output_files:
        description = "Cannot fetch submodules"
        state = "failure"
    elif 'build_log.txt' not in test_output_files:
        description = "Cannot finish cmake"
        state = "failure"
    elif 'install_log.txt' not in test_output_files:
        description = "Cannot build ClickHouse"
        state = "failure"
    elif not test_log_exists and not test_result_exists:
        description = "Cannot install or start ClickHouse"
        state = "failure"
    else:
        state, description, test_results, additional_logs = process_results(
            output_path)

    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha,
                                test_results, [run_log_path] + additional_logs,
                                NAME, True)
    print("::notice ::Report url: {}".format(report_url))
    post_commit_status(gh, pr_info.sha, NAME, description, state, report_url)
Example No. 5
        status = "failure"
    else:
        for f in files:
            path = os.path.join(test_output, f)
            additional_files.append(path)
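            # Scan each output file for ERROR lines; every hit becomes a FAIL entry.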
            with open(path, "r", encoding="utf-8") as check_file:
                for line in check_file:
                    if "ERROR" in line:
                        lines.append((line.split(":")[-1], "FAIL"))
        if lines:
            status = "failure"
            description = "Found errors in docs"
        elif status != "failure":
            lines.append(("No errors found", "OK"))
        else:
            lines.append(("Non zero exit code", "FAIL"))

    s3_helper = S3Helper("https://s3.amazonaws.com")

    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha, lines,
                                additional_files, NAME)
    print("::notice ::Report url: {report_url}")
    commit = get_commit(gh, pr_info.sha)
    commit.create_status(context=NAME,
                         description=description,
                         state=status,
                         target_url=report_url)

    if status == "failure":
        sys.exit(1)
Example No. 6
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    s3_helper = S3Helper('https://s3.amazonaws.com')

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(' ', '_'))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    with open(os.path.join(temp_path, 'changed_images.json'),
              'w') as images_file:
        json.dump(result_images, images_file)

    print("::notice ::Report url: {}".format(url))
    print("::set-output name=url_output::\"{}\"".format(url))
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)
    commit.create_status(context=NAME,
                         description=description,
                         state=status,
                         target_url=url)
Example No. 7
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

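    # "docker manifest" is an experimental CLI command and has to be enabled explicitly.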
    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, push)
            test_results.append((manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(os.path.join(args.path, "changed_images.json"), "w") as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example No. 8
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    state, description, test_results, additional_logs = process_results(
        result_path, server_log_path
    )
    state = override_status(state, check_name, validate_bugix_check)

    ch_helper = ClickHouseHelper()
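    # mark_flaky_tests uses CI history from ClickHouse to flag known-flaky tests.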
    mark_flaky_tests(ch_helper, check_name, test_results)

    report_url = upload_results(
        s3_helper,
        pr_info.number,
        pr_info.sha,
        test_results,
        [run_log_path] + additional_logs,
        check_name_with_group,
    )

    print(f"::notice:: {check_name} Report url: {report_url}")
    if args.post_commit_status == "commit_status":
        post_commit_status(
            gh, pr_info.sha, check_name_with_group, description, state, report_url
        )
    elif args.post_commit_status == "file":
        post_commit_status_to_file(
            os.path.join(temp_path, "post_commit_status.tsv"),
            description,
            state,
            report_url,
        )
Example No. 9
        else:
            logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}",
                          shell=True)

    s3_helper = S3Helper("https://s3.amazonaws.com")
    state, description, test_results, additional_logs = process_results(
        result_path, server_log_path, run_log_path)
    ch_helper = ClickHouseHelper()
    mark_flaky_tests(ch_helper, check_name, test_results)

    report_url = upload_results(
        s3_helper,
        pr_info.number,
        pr_info.sha,
        test_results,
        additional_logs,
        check_name,
    )
    print(f"::notice ::Report url: {report_url}")

    post_commit_status(gh, pr_info.sha, check_name, description, state,
                       report_url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        state,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        report_url,
        check_name,
    )
Example No. 10
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    makedirs(TEMP_PATH, exist_ok=True)

    args = parse_args()
    image = DockerImage(args.image_path, args.image_repo, False)
    args.release_type = auto_release_type(args.version, args.release_type)
    tags = gen_tags(args.version, args.release_type)
    NAME = f"Docker image {image.repo} building check"
    pr_info = None
    if CI:
        pr_info = PRInfo()
        release_or_pr, _ = get_release_or_pr(pr_info, args.version)
        args.bucket_prefix = (
            f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}"
        )

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )
        NAME = f"Docker image {image.repo} build and push"

    logging.info("Following tags will be created: %s", ", ".join(tags))
    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
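    # Build (and optionally push) an image for every target OS and tag;
    # any non-OK result fails the whole check.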
    for os_name in args.os:  # "os_name" avoids shadowing the os module
        for tag in tags:
            test_results.extend(
                build_and_push_image(
                    image, args.push, args.bucket_prefix, os_name, tag, args.version
                )
            )
            if test_results[-1][1] != "OK":
                status = "failure"

    pr_info = pr_info or PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    description = f"Processed tags: {', '.join(tags)}"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)
    if status != "success":
        sys.exit(1)
Example No. 11
            os.path.join(result_path, "store"),
            os.path.join(result_path, "jepsen_store.tar.gz"),
        )
        additional_data.append(os.path.join(result_path,
                                            "jepsen_store.tar.gz"))
    except Exception as ex:
        print("Exception", ex)
        status = "failure"
        description = "No Jepsen output log"
        test_result = [("No Jepsen output log", "FAIL")]

    s3_helper = S3Helper("https://s3.amazonaws.com")
    report_url = upload_results(
        s3_helper,
        pr_info.number,
        pr_info.sha,
        test_result,
        [run_log_path] + additional_data,
        CHECK_NAME,
    )

    print(f"::notice ::Report url: {report_url}")
    post_commit_status(gh, pr_info.sha, CHECK_NAME, description, status,
                       report_url)

    ch_helper = ClickHouseHelper()
    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_result,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        report_url,
        CHECK_NAME,
    )
Example No. 12
    with open(json_path, 'w', encoding='utf-8') as json_params:
        json_params.write(json.dumps(
            get_json_params_dict(check_name, pr_info, images_with_versions)))

    output_path_log = os.path.join(result_path, "main_script_log.txt")

    runner_path = os.path.join(repo_path, "tests/integration", "ci-runner.py")
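    # sudo -E preserves the CI environment for the runner; tee mirrors its output into the log file.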
    run_command = f"sudo -E {runner_path} | tee {output_path_log}"

    with open(output_path_log, 'w', encoding='utf-8') as log:
        with subprocess.Popen(run_command, shell=True, stderr=log,
                              stdout=log, env=my_env) as process:
            retcode = process.wait()
            if retcode == 0:
                logging.info("Run tests successfully")
            else:
                logging.info("Some tests failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)

    state, description, test_results, additional_logs = process_results(result_path)

    ch_helper = ClickHouseHelper()
    mark_flaky_tests(ch_helper, check_name, test_results)

    s3_helper = S3Helper('https://s3.amazonaws.com')
    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha,
                                test_results,
                                [output_path_log] + additional_logs,
                                check_name, False)
    print(f"::notice ::Report url: {report_url}")
    post_commit_status(gh, pr_info.sha, check_name, description, state, report_url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info, test_results, state, stopwatch.duration_seconds,
        stopwatch.start_time_str, report_url, check_name)
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
Example No. 13
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    images_dict = get_images_dict(GITHUB_WORKSPACE, "docker/images.json")

    pr_info = PRInfo()
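    # Determine which images count as changed: all of them, an explicit list,
    # or the files touched by the PR.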
    if args.all:
        pr_info.changed_files = set(images_dict.keys())
    elif args.image_path:
        pr_info.changed_files = set(args.image_path)
    else:
        try:
            pr_info.fetch_changed_files()
        except TypeError:
            # If the event does not contain diff, nothing will be built
            pass

    changed_images = get_changed_docker_images(pr_info, images_dict)
    if changed_images:
        logging.info("Has changed images: %s",
                     ", ".join([im.path for im in changed_images]))

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        # If we are in backport PR, then pr_info.release_pr is defined
        # We use it as tag to reduce rebuilding time
        images_processing_result += process_image_with_parents(
            image, image_versions, pr_info.release_pr, args.push)
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper()

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default",
                                 table="checks",
                                 events=prepared_events)

    if status == "error":
        sys.exit(1)
Example No. 14
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    repo_path = GITHUB_WORKSPACE

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, repo_path,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([str(image[0]) for image in changed_images]))
    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
    # The order is important, PR number is used as cache during the build
    versions = [str(pr_info.number), pr_commit_version]
    result_version = pr_commit_version
    if pr_info.number == 0:
        # First get the latest for cache
        versions.insert(0, "latest")

    if args.suffix:
        # We should build architecture-specific images separately and merge
        # the manifests later in a different script
        versions = [f"{v}-{args.suffix}" for v in versions]
        # changed_images_{suffix}.json should contain all changed images
        result_version = versions

    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
        images_processing_result += process_single_image(
            versions, full_path, image_name, push)
        result_images[image_name] = result_version

    if changed_images:
        description = "Updated " + ",".join([im[1] for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)