else:
        pr_link = f"https://github.com/ClickHouse/ClickHouse/pull/{pr_info.number}"

    task_url = f"https://github.com/ClickHouse/ClickHouse/actions/runs/{os.getenv('GITHUB_RUN_ID')}"
    docker_env += (
        f' -e CHPC_ADD_REPORT_LINKS="<a href={task_url}>Job (actions)</a>'
        f' <a href={pr_link}>Tested commit</a>"'
    )

    if 'RUN_BY_HASH_TOTAL' in os.environ:
        run_by_hash_total = int(os.getenv('RUN_BY_HASH_TOTAL'))
        run_by_hash_num = int(os.getenv('RUN_BY_HASH_NUM'))
        docker_env += f' -e CHPC_TEST_RUN_BY_HASH_TOTAL={run_by_hash_total} -e CHPC_TEST_RUN_BY_HASH_NUM={run_by_hash_num}'
        check_name_with_group = check_name + f' [{run_by_hash_num + 1}/{run_by_hash_total}]'
    else:
        check_name_with_group = check_name

    rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
    if rerun_helper.is_already_finished_by_status():
        logging.info(
            "Check is already finished according to github status, exiting")
        sys.exit(0)

    docker_image = get_image_with_version(reports_path, IMAGE_NAME)

    #with RamDrive(ramdrive_path, ramdrive_size):
    result_path = ramdrive_path
    if not os.path.exists(result_path):
        os.makedirs(result_path)

    run_command = get_run_command(result_path, result_path, repo_tests_path,
                                  pr_info.number, pr_info.sha, docker_env,
                                  docker_image)
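The get_run_command helper referenced here assembles the docker run invocation for the performance-comparison container. A minimal sketch of what such a helper might look like; the mount points and environment variable names below are illustrative assumptions, not the actual ClickHouse CI implementation:

# Sketch only: volumes and env var names are assumptions.
def get_run_command(workspace, result_path, repo_tests_path,
                    pr_number, commit_sha, docker_env, image):
    # Mount the workspace and result directory, pass PR/commit info to the
    # container, and append any extra -e flags accumulated in docker_env.
    return (
        f"docker run --privileged "
        f"--volume={workspace}:/workspace --volume={result_path}:/output "
        f"--volume={repo_tests_path}:/usr/share/clickhouse-test "
        f"-e PR_TO_TEST={pr_number} -e SHA_TO_TEST={commit_sha} "
        f"{docker_env} {image}"
    )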
Example #2
        return state, description, test_results, additional_files


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    repo_path = GITHUB_WORKSPACE
    temp_path = os.path.join(RUNNER_TEMP, "style_check")

    pr_info = PRInfo()

    gh = Github(get_best_robot_token())

    rerun_helper = RerunHelper(gh, pr_info, NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    docker_image = get_image_with_version(temp_path, "clickhouse/style-test")
    s3_helper = S3Helper("https://s3.amazonaws.com")

    subprocess.check_output(
        f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) --cap-add=SYS_PTRACE "
        f"--volume={repo_path}:/ClickHouse --volume={temp_path}:/test_output "
        f"{docker_image}",
        shell=True,
    )
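Every example gates on RerunHelper.is_already_finished_by_status() before doing any work. A minimal sketch of that pattern, assuming the helper simply scans the commit statuses already posted for the tested SHA; the PyGithub calls are real, but the class body and the hard-coded repo name are assumptions:

# Sketch only: the real helper's internals may differ.
class RerunHelper:
    def __init__(self, gh, pr_info, check_name):
        self.check_name = check_name
        repo = gh.get_repo("ClickHouse/ClickHouse")  # assumed repo name
        self.statuses = repo.get_commit(pr_info.sha).get_statuses()

    def is_already_finished_by_status(self):
        # A status with our context in a terminal state means the check
        # already ran for this commit and can be skipped.
        return any(
            status.context == self.check_name
            and status.state in ("success", "failure")
            for status in self.statuses
        )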
Example #3
def main():
    logging.basicConfig(level=logging.INFO)
    temp_path = TEMP_PATH
    logging.info("Reports path %s", REPORTS_PATH)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    build_check_name = sys.argv[1]
    needs_data = None
    required_builds = 0
    if os.path.exists(NEEDS_DATA_PATH):
        with open(NEEDS_DATA_PATH, "rb") as file_handler:
            needs_data = json.load(file_handler)
            required_builds = len(needs_data)

    logging.info("The next builds are required: %s", ", ".join(needs_data))

    gh = Github(get_best_robot_token())
    pr_info = PRInfo()
    rerun_helper = RerunHelper(gh, pr_info, build_check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    builds_for_check = CI_CONFIG["builds_report_config"][build_check_name]
    required_builds = required_builds or len(builds_for_check)

    # Collect reports from json artifacts
    builds_report_map = {}
    for root, _, files in os.walk(REPORTS_PATH):
        for f in files:
            if f.startswith("build_urls_") and f.endswith(".json"):
                logging.info("Found build report json %s", f)
                build_name = get_build_name_from_file_name(f)
                if build_name in builds_for_check:
                    with open(os.path.join(root, f), "rb") as file_handler:
                        builds_report_map[build_name] = json.load(file_handler)
                else:
                    logging.info(
                        "Skipping report %s for build %s, it's not in our reports list",
                        f,
                        build_name,
                    )

    # Sort reports by config order
    build_reports = [
        builds_report_map[build_name]
        for build_name in builds_for_check
        if build_name in builds_report_map
    ]

    some_builds_are_missing = len(build_reports) < required_builds
    missing_build_names = []
    if some_builds_are_missing:
        logging.warning(
            "Expected to get %s build results, got only %s",
            required_builds,
            len(build_reports),
        )
        missing_build_names = [
            name
            for name in (needs_data or [])
            if not any(rep["job_name"] == name for rep in build_reports)
        ]
    else:
        logging.info("Got exactly %s builds", len(builds_report_map))

    # Collect build results, artifact URLs, and log URLs across all reports
    build_results = []  # type: List[BuildResult]
    build_artifacts = []
    build_logs = []

    for build_report in build_reports:
        build_result, build_artifacts_url, build_logs_url = process_report(build_report)
        logging.info(
            "Got %s artifact groups for build report", len(build_result)
        )
        build_results.extend(build_result)
        build_artifacts.extend(build_artifacts_url)
        build_logs.extend(build_logs_url)

    for failed_job in missing_build_names:
        build_result, build_artifacts_url, build_logs_url = get_failed_report(
            failed_job
        )
        build_results.extend(build_result)
        build_artifacts.extend(build_artifacts_url)
        build_logs.extend(build_logs_url)

    total_groups = len(build_results)
    logging.info("Totally got %s artifact groups", total_groups)
    if total_groups == 0:
        logging.error("No success builds, failing check")
        sys.exit(1)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
    branch_name = "master"
    if pr_info.number != 0:
        branch_name = f"PR #{pr_info.number}"
        branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_info.number}"
    commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{pr_info.sha}"
    task_url = GITHUB_RUN_URL
    report = create_build_html_report(
        build_check_name,
        build_results,
        build_logs,
        build_artifacts,
        task_url,
        branch_url,
        branch_name,
        commit_url,
    )

    report_path = os.path.join(temp_path, "report.html")
    with open(report_path, "w", encoding="utf-8") as fd:
        fd.write(report)

    logging.info("Going to upload prepared report")
    context_name_for_path = build_check_name.lower().replace(" ", "_")
    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + context_name_for_path
    )

    url = s3_helper.upload_build_file_to_s3(
        report_path, s3_path_prefix + "/report.html"
    )
    logging.info("Report url %s", url)
    print(f"::notice ::Report url: {url}")

    # Prepare a commit status
    ok_groups = 0
    summary_status = "success"
    for build_result in build_results:
        if build_result.status == "failure" and summary_status != "error":
            summary_status = "failure"
        if build_result.status == "error" or not build_result.status:
            summary_status = "error"

        if build_result.status == "success":
            ok_groups += 1

    if ok_groups == 0 or some_builds_are_missing:
        summary_status = "error"

    addition = ""
    if some_builds_are_missing:
        addition = f"({len(build_reports)} of {required_builds} builds are OK)"

    description = f"{ok_groups}/{total_groups} artifact groups are OK {addition}"

    commit = get_commit(gh, pr_info.sha)
    commit.create_status(
        context=build_check_name,
        description=description,
        state=summary_status,
        target_url=url,
    )

    if summary_status == "error":
        sys.exit(1)
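get_commit, used just above to post the final commit status, is typically a thin wrapper over PyGithub. A plausible sketch; resolving the repository from GITHUB_REPOSITORY with an upstream fallback is an assumption for illustration:

import os
from github import Github  # PyGithub

def get_commit(gh: Github, commit_sha: str):
    # Resolve the repo from the Actions-provided env var; the fallback
    # repo name below is an assumption for this sketch.
    repo = gh.get_repo(os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse"))
    return repo.get_commit(commit_sha)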
Example #4

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    reports_path = os.getenv("REPORTS_PATH", "./reports")
    temp_path = os.getenv("TEMP_PATH", ".")
    logging.info("Reports path %s", reports_path)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    build_check_name = sys.argv[1]

    gh = Github(get_best_robot_token())
    pr_info = PRInfo(get_event())
    rerun_helper = RerunHelper(gh, pr_info, build_check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info(
            "Check is already finished according to github status, exiting")
        sys.exit(0)

    reports_order = CI_CONFIG["builds_report_config"][build_check_name]
    logging.info("My reports list %s", reports_order)

    build_reports_map = {}
    for root, _, files in os.walk(reports_path):
        for f in files:
            if f.startswith("build_urls_") and f.endswith(".json"):
                logging.info("Found build report json %s", f)
                build_name = get_build_name_from_file_name(f)
                if build_name in reports_order:
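Both report-collection loops derive the build name from the artifact filename with get_build_name_from_file_name. Given the startswith/endswith filters above, a likely implementation just strips the prefix and suffix; this sketch assumes Python 3.9+ for removeprefix/removesuffix:

def get_build_name_from_file_name(file_name: str) -> str:
    # e.g. "build_urls_binary_release.json" -> "binary_release"
    return file_name.removeprefix("build_urls_").removesuffix(".json")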
Example #5
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("git_helper").setLevel(logging.DEBUG)
    args = parse_args()

    stopwatch = Stopwatch()

    repo_path = GITHUB_WORKSPACE
    temp_path = os.path.join(RUNNER_TEMP, "style_check")

    pr_info = PRInfo()
    if args.push:
        checkout_head(pr_info)

    gh = GitHub(get_best_robot_token())

    rerun_helper = RerunHelper(gh, pr_info, NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info(
            "Check is already finished according to github status, exiting")
        # Finish with the same code as previous
        state = rerun_helper.get_finished_status().state  # type: ignore
        # state == "success" -> code = 0
        code = int(state != "success")
        sys.exit(code)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    docker_image = get_image_with_version(temp_path, "clickhouse/style-test")
    s3_helper = S3Helper("https://s3.amazonaws.com")
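The Stopwatch constructed near the top of several of these scripts is a small timing helper. A minimal sketch consistent with how it is used; the attribute names here are assumptions:

import datetime

class Stopwatch:
    def __init__(self):
        # Record the start time once at construction.
        self.start_time = datetime.datetime.utcnow()

    @property
    def duration_seconds(self) -> float:
        # Elapsed wall-clock time since construction.
        return (datetime.datetime.utcnow() - self.start_time).total_seconds()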