def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.push:
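        # Log in to Docker Hub as the robot user; the password is fetched from the
        # AWS SSM parameter store and passed to "docker login" via stdin.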
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

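    # Load the per-suffix (per-architecture) image lists that will be merged into manifests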
    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

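    # Enable experimental Docker CLI features (needed for "docker manifest" operations)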
    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, args.push)
            test_results.append((manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(
        os.path.join(args.path, "changed_images.json"), "w", encoding="utf-8"
    ) as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

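    # GitHub Actions workflow commands: "::notice" adds an annotation to the job log,
    # "::set-output" exposes the report URL as a step output for subsequent steps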
    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

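    # Truncate the description to fit GitHub's commit status length limit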
    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)
Example #2
        "--force",
        action="store_true",
        help="check the docs even if there no changes",
    )
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = TEMP_PATH
    repo_path = REPO_COPY

    pr_info = PRInfo(need_changed_files=True)

    gh = Github(get_best_robot_token())

    rerun_helper = RerunHelper(gh, pr_info, NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info(
            "Check is already finished according to github status, exiting")
        sys.exit(0)

    if not pr_info.has_changes_in_documentation() and not args.force:
        logging.info("No changes in documentation")
        commit = get_commit(gh, pr_info.sha)
        commit.create_status(context=NAME,
                             description="No changes in docs",
                             state="success")
        sys.exit(0)
Example #3
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    makedirs(TEMP_PATH, exist_ok=True)

    args = parse_args()
    image = DockerImage(args.image_path, args.image_repo, False)
    args.release_type = auto_release_type(args.version, args.release_type)
    tags = gen_tags(args.version, args.release_type)
    NAME = f"Docker image {image.repo} building check (actions)"
    pr_info = None
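    # In CI, point the bucket prefix at the builds uploaded to S3 for this release or PR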
    if CI:
        pr_info = PRInfo()
        release_or_pr = get_release_or_pr(pr_info, {"package_type": ""},
                                          args.version)
        args.bucket_prefix = (f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/"
                              f"{release_or_pr}/{pr_info.sha}")

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )
        NAME = f"Docker image {image.repo} build and push (actions)"

    logging.info("Following tags will be created: %s", ", ".join(tags))
    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for os_name in args.os:
        for tag in tags:
            test_results.extend(
                build_and_push_image(image, args.push, args.bucket_prefix,
                                     os_name, tag, args.version))
            if test_results[-1][1] != "OK":
                status = "failure"

    pr_info = pr_info or PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    description = f"Processed tags: {', '.join(tags)}"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default",
                                 table="checks",
                                 events=prepared_events)
Example #4
    cmd = (
        "docker run " + f"--volume={repo_path}:/repo_folder "
        f"--volume={output_path}:/test_output "
        f"-e 'DATA={S3_DOWNLOAD}/{S3_TEST_REPORTS_BUCKET}/codebrowser/data' {image}"
    )
    return cmd


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))

    gh = Github(get_best_robot_token(), per_page=100)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

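    # Resolve the clickhouse/codebrowser image together with its version from IMAGES_PATH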
    docker_image = get_image_with_version(IMAGES_PATH,
                                          "clickhouse/codebrowser")
    s3_helper = S3Helper()

    result_path = os.path.join(temp_path, "result_path")
    if not os.path.exists(result_path):
        os.makedirs(result_path)

    run_command = get_run_command(REPO_COPY, result_path, docker_image)

    logging.info("Going to run codebrowser: %s", run_command)
Example #5
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

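    # docker/images.json describes which Docker images are built from which directories of the repo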
    images_dict = get_images_dict(GITHUB_WORKSPACE, "docker/images.json")

    pr_info = PRInfo()
    if args.all:
        pr_info.changed_files = set(images_dict.keys())
    elif args.image_path:
        pr_info.changed_files = set(args.image_path)
    else:
        pr_info.fetch_changed_files()

    changed_images = get_changed_docker_images(pr_info, images_dict)
    if changed_images:
        logging.info(
            "Changed images: %s", ", ".join([im.path for im in changed_images])
        )

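    # Tags for every changed image are derived from the PR number, the commit SHA and the suffix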
    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        images_processing_result += process_image_with_parents(
            image, image_versions, args.push
        )
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
    )
    status, test_results = process_test_results(
        s3_helper, images_processing_result, s3_path_prefix
    )

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)

    if status == "error":
        sys.exit(1)
Example #6
def main():
    logging.basicConfig(level=logging.INFO)
    temp_path = TEMP_PATH
    logging.info("Reports path %s", REPORTS_PATH)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    build_check_name = sys.argv[1]
    needs_data = None
    required_builds = 0
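    # NEEDS_DATA_PATH, if present, lists the build jobs this report depends on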
    if os.path.exists(NEEDS_DATA_PATH):
        with open(NEEDS_DATA_PATH, "rb") as file_handler:
            needs_data = json.load(file_handler)
            required_builds = len(needs_data)

    logging.info("The next builds are required: %s", ", ".join(needs_data))

    gh = Github(get_best_robot_token())
    pr_info = PRInfo()
    rerun_helper = RerunHelper(gh, pr_info, build_check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    builds_for_check = CI_CONFIG["builds_report_config"][build_check_name]
    required_builds = required_builds or len(builds_for_check)

    # Collect reports from json artifacts
    builds_report_map = {}
    for root, _, files in os.walk(REPORTS_PATH):
        for f in files:
            if f.startswith("build_urls_") and f.endswith(".json"):
                logging.info("Found build report json %s", f)
                build_name = get_build_name_from_file_name(f)
                if build_name in builds_for_check:
                    with open(os.path.join(root, f), "rb") as file_handler:
                        builds_report_map[build_name] = json.load(file_handler)
                else:
                    logging.info(
                        "Skipping report %s for build %s, it's not in our reports list",
                        f,
                        build_name,
                    )

    # Sort reports by config order
    build_reports = [
        builds_report_map[build_name]
        for build_name in builds_for_check
        if build_name in builds_report_map
    ]

    some_builds_are_missing = len(build_reports) < required_builds
    missing_build_names = []
    if some_builds_are_missing:
        logging.warning(
            "Expected to get %s build results, got only %s",
            required_builds,
            len(build_reports),
        )
        missing_build_names = [
            name
            for name in (needs_data or [])
            if not any(rep for rep in build_reports if rep["job_name"] == name)
        ]
    else:
        logging.info("Got exactly %s builds", len(builds_report_map))

    # Group build artifacts by groups
    build_results = []  # type: List[BuildResult]
    build_artifacts = []
    build_logs = []

    for build_report in build_reports:
        build_result, build_artifacts_url, build_logs_url = process_report(build_report)
        logging.info(
            "Got %s artifact groups for build report", len(build_result)
        )
        build_results.extend(build_result)
        build_artifacts.extend(build_artifacts_url)
        build_logs.extend(build_logs_url)

    for failed_job in missing_build_names:
        build_result, build_artifacts_url, build_logs_url = get_failed_report(
            failed_job
        )
        build_results.extend(build_result)
        build_artifacts.extend(build_artifacts_url)
        build_logs.extend(build_logs_url)

    total_groups = len(build_results)
    logging.info("Totally got %s artifact groups", total_groups)
    if total_groups == 0:
        logging.error("No success builds, failing check")
        sys.exit(1)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
    branch_name = "master"
    if pr_info.number != 0:
        branch_name = f"PR #{pr_info.number}"
        branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_info.number}"
    commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{pr_info.sha}"
    task_url = GITHUB_RUN_URL
    report = create_build_html_report(
        build_check_name,
        build_results,
        build_logs,
        build_artifacts,
        task_url,
        branch_url,
        branch_name,
        commit_url,
    )

    report_path = os.path.join(temp_path, "report.html")
    with open(report_path, "w", encoding="utf-8") as fd:
        fd.write(report)

    logging.info("Going to upload prepared report")
    context_name_for_path = build_check_name.lower().replace(" ", "_")
    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + context_name_for_path
    )

    url = s3_helper.upload_build_file_to_s3(
        report_path, s3_path_prefix + "/report.html"
    )
    logging.info("Report url %s", url)
    print(f"::notice ::Report url: {url}")

    # Prepare a commit status
    ok_groups = 0
    summary_status = "success"
    for build_result in build_results:
        if build_result.status == "failure" and summary_status != "error":
            summary_status = "failure"
        if build_result.status == "error" or not build_result.status:
            summary_status = "error"

        if build_result.status == "success":
            ok_groups += 1

    if ok_groups == 0 or some_builds_are_missing:
        summary_status = "error"

    addition = ""
    if some_builds_are_missing:
        addition = f"({len(build_reports)} of {required_builds} builds are OK)"

    description = f"{ok_groups}/{total_groups} artifact groups are OK {addition}"

    commit = get_commit(gh, pr_info.sha)
    commit.create_status(
        context=build_check_name,
        description=description,
        state=summary_status,
        target_url=url,
    )

    if summary_status == "error":
        sys.exit(1)
Example #7
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    repo_path = GITHUB_WORKSPACE

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, repo_path,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([str(image[0]) for image in changed_images]))
    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
    # The order is important, PR number is used as cache during the build
    versions = [str(pr_info.number), pr_commit_version]
    result_version = pr_commit_version
    if pr_info.number == 0:
        # First get the latest for cache
        versions.insert(0, "latest")

    if args.suffix:
        # We should build architecture specific images separately and merge a
        # manifest lately in a different script
        versions = [f"{v}-{args.suffix}" for v in versions]
        # changed_images_{suffix}.json should contain all changed images
        result_version = versions

    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
        images_processing_result += process_single_image(
            versions, full_path, image_name, push)
        result_images[image_name] = result_version

    if changed_images:
        description = "Updated " + ",".join([im[1] for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)