Example #1
 def test_get_changed_docker_images(self):
     pr_info = PRInfo(PRInfo.default_event.copy())
     pr_info.changed_files = {
         "docker/test/stateless",
         "docker/test/base",
         "docker/docs/builder",
     }
     images = sorted(
         list(
             di.get_changed_docker_images(pr_info, "/",
                                          self.docker_images_path)))
     self.maxDiff = None
     expected = sorted([
         di.DockerImage("docker/test/base", "clickhouse/test-base"),
         di.DockerImage("docker/docs/builder", "clickhouse/docs-builder"),
         di.DockerImage(
             "docker/test/stateless",
             "clickhouse/stateless-test",
             "clickhouse/test-base",
         ),
         di.DockerImage(
             "docker/test/integration/base",
             "clickhouse/integration-test",
             "clickhouse/test-base",
         ),
         di.DockerImage("docker/test/fuzzer", "clickhouse/fuzzer",
                        "clickhouse/test-base"),
         di.DockerImage(
             "docker/test/keeper-jepsen",
             "clickhouse/keeper-jepsen-test",
             "clickhouse/test-base",
         ),
         di.DockerImage(
             "docker/docs/check",
             "clickhouse/docs-check",
             "clickhouse/docs-builder",
         ),
         di.DockerImage(
             "docker/docs/release",
             "clickhouse/docs-release",
             "clickhouse/docs-builder",
         ),
         di.DockerImage(
             "docker/test/stateful",
             "clickhouse/stateful-test",
             "clickhouse/stateless-test",
         ),
         di.DockerImage(
             "docker/test/unit",
             "clickhouse/unit-test",
             "clickhouse/stateless-test",
         ),
         di.DockerImage(
             "docker/test/stress",
             "clickhouse/stress-test",
             "clickhouse/stateful-test",
         ),
     ])
     self.assertEqual(images, expected)
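This test pins down the shape of di.DockerImage: a path to the Dockerfile directory, the repo name on Docker Hub, and an optional parent repo, with value equality and ordering so the two sorted lists compare directly. A minimal sketch of such a structure (path and repo match the attributes the later examples read; the parent field name is a guess):

from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True, order=True)
class DockerImage:
    path: str                     # directory holding the Dockerfile
    repo: str                     # image name on Docker Hub
    parent: Optional[str] = None  # repo of the base image, if any

Because every path in the test is unique, ordering never has to compare a None parent against a string, so sorted() stays safe.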
Example #2
 def test_gen_version(self):
     pr_info = PRInfo(PRInfo.default_event.copy())
     versions, result_version = di.gen_versions(pr_info, None)
     self.assertEqual(versions, ["latest", "0", "0-HEAD"])
     self.assertEqual(result_version, "0-HEAD")
     versions, result_version = di.gen_versions(pr_info, "suffix")
     self.assertEqual(versions, ["latest-suffix", "0-suffix", "0-HEAD-suffix"])
     self.assertEqual(result_version, versions)
     pr_info.number = 1
     versions, result_version = di.gen_versions(pr_info, None)
     self.assertEqual(versions, ["1", "1-HEAD"])
     self.assertEqual(result_version, "1-HEAD")
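These assertions fully determine gen_versions for the exercised cases; a sketch consistent with them, assuming the default test event carries pr_info.sha == "HEAD" (the real implementation lives in the di module):

def gen_versions(pr_info, suffix):
    pr_number = str(pr_info.number)
    versions = [pr_number, f"{pr_number}-{pr_info.sha}"]
    if pr_info.number == 0:
        # "latest" goes first so it can seed the build cache (see Example #30)
        versions.insert(0, "latest")
    if suffix:
        # architecture-specific builds: every suffixed tag belongs to the
        # result, since a manifest is merged from them later
        versions = [f"{v}-{suffix}" for v in versions]
        return versions, versions
    return versions, versions[-1]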
Example #3
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
    repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
    reports_path = os.getenv("REPORTS_PATH", "./reports")

    check_name = sys.argv[1]

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    is_flaky_check = 'flaky' in check_name
    pr_info = PRInfo(get_event(), need_changed_files=is_flaky_check)

    gh = Github(get_best_robot_token())

    images = get_images_with_versions(temp_path, IMAGES)
    images_with_versions = {i.name: i.version for i in images}
    result_path = os.path.join(temp_path, "output_dir")
    if not os.path.exists(result_path):
        os.makedirs(result_path)

    work_path = os.path.join(temp_path, "workdir")
    if not os.path.exists(work_path):
        os.makedirs(work_path)

    build_path = os.path.join(temp_path, "build")
    if not os.path.exists(build_path):
        os.makedirs(build_path)
Example #4
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    images_dict = get_images_dict(GITHUB_WORKSPACE, "docker/images.json")

    pr_info = PRInfo()
    if args.all:
        pr_info.changed_files = set(images_dict.keys())
    elif args.image_path:
        pr_info.changed_files = set(args.image_path)
    else:
        try:
            pr_info.fetch_changed_files()
        except TypeError:
            # If the event does not contain a diff, nothing will be built
            pass

    changed_images = get_changed_docker_images(pr_info, images_dict)
    if changed_images:
        logging.info("Has changed images: %s",
                     ", ".join([im.path for im in changed_images]))

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        # If we are in a backport PR, pr_info.release_pr is defined;
        # we use it as a tag to reduce rebuild time
        images_processing_result += process_image_with_parents(
            image, image_versions, pr_info.release_pr, args.push)
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper()

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default",
                                 table="checks",
                                 events=prepared_events)

    if status == "error":
        sys.exit(1)
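process_image_with_parents is only called here, but the call shape and the release_pr comment suggest a parent-first build. A hypothetical sketch, assuming parents are resolved to DockerImage objects and a build_single_image helper (not in the source) performs the actual docker build:

def process_image_with_parents(image, versions, additional_cache, push, processed=None):
    processed = processed if processed is not None else set()
    results = []
    if image.repo in processed:
        return results
    if image.parent is not None:
        # the base image must exist before the child can build on top of it
        results += process_image_with_parents(
            image.parent, versions, additional_cache, push, processed)
    for version in versions:
        results.append(build_single_image(image, version, additional_cache, push))
    processed.add(image.repo)
    return results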
Example #5
    temp_path = TEMP_PATH
    repo_path = REPO_COPY
    reports_path = REPORTS_PATH

    args = parse_args()
    check_name = args.check_name
    kill_timeout = args.kill_timeout
    validate_bugfix_check = args.validate_bugfix

    flaky_check = "flaky" in check_name.lower()

    run_changed_tests = flaky_check or validate_bugfix_check
    gh = Github(get_best_robot_token())

    pr_info = PRInfo(need_changed_files=run_changed_tests)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    if validate_bugfix_check and "pr-bugfix" not in pr_info.labels:
        if args.post_commit_status == "file":
            post_commit_status_to_file(
                os.path.join(temp_path, "post_commit_status.tsv"),
                "Skipped (no pr-bugfix)",
                "success",
                "null",
            )
        logging.info("Skipping '%s' (no pr-bugfix)", check_name)
        sys.exit(0)
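The call above implies post_commit_status_to_file persists the would-be commit status as a single TSV row so a later job can post it; a sketch under that assumption (the column order is a guess):

import csv

def post_commit_status_to_file(file_path, description, state, report_url):
    with open(file_path, "w", encoding="utf-8") as status_file:
        writer = csv.writer(status_file, delimiter="\t")
        writer.writerow([state, report_url, description])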
Example #6
    return state, description, test_results, additional_files


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
    caches_path = os.getenv("CACHES_PATH", temp_path)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event)

    gh = Github(get_best_robot_token())

    docker_image = get_image_with_version(temp_path, 'clickhouse/fasttest')

    s3_helper = S3Helper('https://s3.amazonaws.com')

    workspace = os.path.join(temp_path, "fasttest-workspace")
    if not os.path.exists(workspace):
        os.makedirs(workspace)

    output_path = os.path.join(temp_path, "fasttest-output")
    if not os.path.exists(output_path):
        os.makedirs(output_path)
Example #7
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, GITHUB_WORKSPACE,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([im.path for im in changed_images]))

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        images_processing_result += process_image_with_parents(
            image, image_versions, push)
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
Example #8
    repo_path = os.getenv("GITHUB_WORKSPACE", os.path.abspath("../../"))
    temp_path = os.path.join(
        os.getenv("RUNNER_TEMP", os.path.abspath("./temp")),
        'docker_images_check')
    dockerhub_password = get_parameter_from_ssm('dockerhub_robot_password')

    if os.path.exists(temp_path):
        shutil.rmtree(temp_path)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event, False, True)
    changed_images, dockerhub_repo_name = get_changed_docker_images(
        pr_info, repo_path, "docker/images.json")
    logging.info("Has changed images %s",
                 ', '.join([str(image[0]) for image in changed_images]))
    pr_commit_version = str(pr_info.number) + '-' + pr_info.sha
    versions = [str(pr_info.number), pr_commit_version]
    if pr_info.number == 0:
        versions.append("latest")

    subprocess.check_output(
        "docker login --username 'robotclickhouse' --password '{}'".format(
            dockerhub_password),
        shell=True)

    result_images = {}
Example #9
            r"(?i)doc|((non|in|not|un)[-\s]*significant)|(not[ ]*for[ ]*changelog)",
            category,
    ):
        return "", category

    if not entry:
        return f"Changelog entry required for category '{category}'", category

    return "", category


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    pr_info = PRInfo(need_orgs=True,
                     pr_event_from_api=True,
                     need_changed_files=True)
    can_run, description, labels_state = should_run_checks_for_pr(pr_info)
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)

    description_error, category = check_pr_description(pr_info)
    pr_labels_to_add = []
    pr_labels_to_remove = []
    if (category in CATEGORY_TO_LABEL
            and CATEGORY_TO_LABEL[category] not in pr_info.labels):
        pr_labels_to_add.append(CATEGORY_TO_LABEL[category])

    for label in pr_info.labels:
        if (label in CATEGORY_TO_LABEL.values()
                and category in CATEGORY_TO_LABEL
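The regex at the top of this example decides which categories are exempt from a changelog entry; a quick standalone check of what it accepts:

import re

SKIP_CATEGORY_RE = (
    r"(?i)doc|((non|in|not|un)[-\s]*significant)|(not[ ]*for[ ]*changelog)")
for category in ("Documentation", "Non-significant", "Not for changelog", "Bug Fix"):
    print(category, "->", bool(re.match(SKIP_CATEGORY_RE, category)))
# Only "Bug Fix" fails to match, so only it requires a changelog entry.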
Example #10
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    repo_path = os.getenv("GITHUB_WORKSPACE", os.path.abspath("../../"))
    temp_path = os.path.join(os.getenv("RUNNER_TEMP", os.path.abspath("./temp")), 'docker_images_check')
    dockerhub_password = get_parameter_from_ssm('dockerhub_robot_password')

    if os.path.exists(temp_path):
        shutil.rmtree(temp_path)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    pr_info = PRInfo(get_event(), need_changed_files=True)
    changed_images, dockerhub_repo_name = get_changed_docker_images(pr_info, repo_path, "docker/images.json")
    logging.info("Has changed images %s", ', '.join([str(image[0]) for image in changed_images]))
    pr_commit_version = str(pr_info.number) + '-' + pr_info.sha
    versions = [str(pr_info.number), pr_commit_version]
    if pr_info.number == 0:
        versions.append("latest")

    subprocess.check_output("docker login --username 'robotclickhouse' --password '{}'".format(dockerhub_password), shell=True)

    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
        images_processing_result += process_single_image(versions, full_path, image_name)
        result_images[image_name] = pr_commit_version
Example #11
    return True, "No special conditions apply"


def get_commit(gh, commit_sha):
    repo = gh.get_repo(os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse"))
    commit = repo.get_commit(commit_sha)
    return commit


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    with open(os.getenv('GITHUB_EVENT_PATH'), 'r') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event, need_orgs=True)
    can_run, description = should_run_checks_for_pr(pr_info)
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)
    url = f"https://github.com/ClickHouse/ClickHouse/actions/runs/{os.getenv('GITHUB_RUN_ID')}"
    if not can_run:
        print("::notice ::Cannot run")
        commit.create_status(context=NAME,
                             description=description,
                             state="failure",
                             target_url=url)
        sys.exit(1)
    else:
        if 'pr-documentation' in pr_info.labels or 'pr-doc-fix' in pr_info.labels:
            commit.create_status(
                context=NAME,
Example #12
def filter_statuses(statuses):
    """
    Squash statuses to latest state
    1. context="first", state="success", update_time=1
    2. context="second", state="success", update_time=2
    3. context="first", stat="failure", update_time=3
    =========>
    1. context="second", state="success"
    2. context="first", stat="failure"
    """
    filt = {}
    for status in sorted(statuses, key=lambda x: x.updated_at):
        filt[status.context] = status
    return filt


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    pr_info = PRInfo(get_event(), need_orgs=True)
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)

    url = f"{os.getenv('GITHUB_SERVER_URL')}/{os.getenv('GITHUB_REPOSITORY')}/actions/runs/{os.getenv('GITHUB_RUN_ID')}"
    statuses = filter_statuses(list(commit.get_statuses()))
    if NAME in statuses and statuses[NAME].state == "pending":
        commit.create_status(context=NAME,
                             description="All checks finished",
                             state="success",
                             target_url=url)
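The squashing described in the docstring can be exercised directly with stand-in status objects:

from types import SimpleNamespace

statuses = [
    SimpleNamespace(context="first", state="success", updated_at=1),
    SimpleNamespace(context="second", state="success", updated_at=2),
    SimpleNamespace(context="first", state="failure", updated_at=3),
]
latest = filter_statuses(statuses)
assert latest["first"].state == "failure"
assert latest["second"].state == "success"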
Example #13
    )
    parser.add_argument(
        "--force",
        action="store_true",
        help="check the docs even if there no changes",
    )
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = TEMP_PATH
    repo_path = REPO_COPY

    pr_info = PRInfo(need_changed_files=True)

    gh = Github(get_best_robot_token())

    rerun_helper = RerunHelper(gh, pr_info, NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    if not pr_info.has_changes_in_documentation() and not args.force:
        logging.info("No changes in documentation")
        commit = get_commit(gh, pr_info.sha)
        commit.create_status(
            context=NAME, description="No changes in docs", state="success"
        )
        sys.exit(0)
Example #14
 def add_pr(self, pr):
     self.prs.append(PRInfo(pr.created_at, pr.html_url, pr.closed_at))
     self.number_of_prs += 1
Example #15
    if re.match(
            r"(?i)doc|((non|in|not|un)[-\s]*significant)|(not[ ]*for[ ]*changelog)",
            category,
    ):
        return ""

    if not entry:
        return f"Changelog entry required for category '{category}'"

    return ""


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    pr_info = PRInfo(need_orgs=True, pr_event_from_api=True)
    can_run, description, labels_state = should_run_checks_for_pr(pr_info)
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)

    description_report = check_pr_description(pr_info)[:139]
    if description_report:
        print("::notice ::Cannot run, description does not match the template")
        logging.info("PR body doesn't match the template: (start)\n%s\n(end)",
                     pr_info.body)
        url = (f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/"
               "blob/master/.github/PULL_REQUEST_TEMPLATE.md?plain=1")
        commit.create_status(
            context=NAME,
            description=description_report,
            state="failure",
Example #16
    if "RUN_BY_HASH_NUM" in os.environ:
        run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM"))
        run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL"))
        check_name_with_group = (
            check_name + f" [{run_by_hash_num + 1}/{run_by_hash_total}]")
    else:
        run_by_hash_num = 0
        run_by_hash_total = 0
        check_name_with_group = check_name

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    is_flaky_check = "flaky" in check_name
    pr_info = PRInfo(need_changed_files=is_flaky_check or validate_bugfix_check)

    if validate_bugfix_check and "pr-bugfix" not in pr_info.labels:
        if args.post_commit_status == "file":
            post_commit_status_to_file(
                os.path.join(temp_path, "post_commit_status.tsv"),
                "Skipped (no pr-bugfix)",
                "success",
                "null",
            )
        logging.info("Skipping '%s' (no pr-bugfix)", check_name)
        sys.exit(0)

    gh = Github(get_best_robot_token())

    rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
Example #17
from ssh import SSHKey
from upload_result_helper import upload_results
from docker_pull_helper import get_image_with_version
from commit_status_helper import get_commit
from rerun_helper import RerunHelper

NAME = "Docs Release (actions)"

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    temp_path = TEMP_PATH
    repo_path = REPO_COPY

    gh = Github(get_best_robot_token())
    pr_info = PRInfo(need_changed_files=True)
    rerun_helper = RerunHelper(gh, pr_info, NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    docker_image = get_image_with_version(temp_path, "clickhouse/docs-release")

    test_output = os.path.join(temp_path, "docs_release_log")
    if not os.path.exists(test_output):
        os.makedirs(test_output)

    token = CLOUDFLARE_TOKEN
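Several examples gate on RerunHelper.is_already_finished_by_status(). A plausible sketch of that guard, assuming it scans the commit statuses for a terminal state under the check's context (the real class may differ):

import os

class RerunHelper:
    def __init__(self, gh, pr_info, check_name):
        repo = gh.get_repo(os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse"))
        self.check_name = check_name
        self.statuses = list(repo.get_commit(pr_info.sha).get_statuses())

    def is_already_finished_by_status(self):
        # any terminal status posted under our context means the check already ran
        return any(
            status.context == self.check_name
            and status.state in ("success", "failure")
            for status in self.statuses)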
Example #18
def main():
    logging.basicConfig(level=logging.INFO)
    temp_path = TEMP_PATH
    logging.info("Reports path %s", REPORTS_PATH)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    build_check_name = sys.argv[1]
    needs_data = None
    required_builds = 0
    if os.path.exists(NEEDS_DATA_PATH):
        with open(NEEDS_DATA_PATH, "rb") as file_handler:
            needs_data = json.load(file_handler)
            required_builds = len(needs_data)

    logging.info("The next builds are required: %s", ", ".join(needs_data))

    gh = Github(get_best_robot_token())
    pr_info = PRInfo()
    rerun_helper = RerunHelper(gh, pr_info, build_check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    builds_for_check = CI_CONFIG["builds_report_config"][build_check_name]
    required_builds = required_builds or len(builds_for_check)

    # Collect reports from json artifacts
    builds_report_map = {}
    for root, _, files in os.walk(REPORTS_PATH):
        for f in files:
            if f.startswith("build_urls_") and f.endswith(".json"):
                logging.info("Found build report json %s", f)
                build_name = get_build_name_from_file_name(f)
                if build_name in builds_for_check:
                    with open(os.path.join(root, f), "rb") as file_handler:
                        builds_report_map[build_name] = json.load(file_handler)
                else:
                    logging.info(
                        "Skipping report %s for build %s, it's not in our reports list",
                        f,
                        build_name,
                    )

    # Sort reports by config order
    build_reports = [
        builds_report_map[build_name]
        for build_name in builds_for_check
        if build_name in builds_report_map
    ]

    some_builds_are_missing = len(build_reports) < required_builds
    missing_build_names = []
    if some_builds_are_missing:
        logging.warning(
            "Expected to get %s build results, got only %s",
            required_builds,
            len(build_reports),
        )
        missing_build_names = [
            name
            for name in needs_data
            if not any(rep for rep in build_reports if rep["job_name"] == name)
        ]
    else:
        logging.info("Got exactly %s builds", len(builds_report_map))

    # Group the build artifacts
    build_results = []  # type: List[BuildResult]
    build_artifacts = []
    build_logs = []

    for build_report in build_reports:
        build_result, build_artifacts_url, build_logs_url = process_report(build_report)
        logging.info(
            "Got %s artifact groups for build report report", len(build_result)
        )
        build_results.extend(build_result)
        build_artifacts.extend(build_artifacts_url)
        build_logs.extend(build_logs_url)

    for failed_job in missing_build_names:
        build_result, build_artifacts_url, build_logs_url = get_failed_report(
            failed_job
        )
        build_results.extend(build_result)
        build_artifacts.extend(build_artifacts_url)
        build_logs.extend(build_logs_url)

    total_groups = len(build_results)
    logging.info("Totally got %s artifact groups", total_groups)
    if total_groups == 0:
        logging.error("No success builds, failing check")
        sys.exit(1)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
    branch_name = "master"
    if pr_info.number != 0:
        branch_name = f"PR #{pr_info.number}"
        branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_info.number}"
    commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{pr_info.sha}"
    task_url = GITHUB_RUN_URL
    report = create_build_html_report(
        build_check_name,
        build_results,
        build_logs,
        build_artifacts,
        task_url,
        branch_url,
        branch_name,
        commit_url,
    )

    report_path = os.path.join(temp_path, "report.html")
    with open(report_path, "w", encoding="utf-8") as fd:
        fd.write(report)

    logging.info("Going to upload prepared report")
    context_name_for_path = build_check_name.lower().replace(" ", "_")
    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + context_name_for_path
    )

    url = s3_helper.upload_build_file_to_s3(
        report_path, s3_path_prefix + "/report.html"
    )
    logging.info("Report url %s", url)
    print(f"::notice ::Report url: {url}")

    # Prepare a commit status
    ok_groups = 0
    summary_status = "success"
    for build_result in build_results:
        if build_result.status == "failure" and summary_status != "error":
            summary_status = "failure"
        if build_result.status == "error" or not build_result.status:
            summary_status = "error"

        if build_result.status == "success":
            ok_groups += 1

    if ok_groups == 0 or some_builds_are_missing:
        summary_status = "error"

    addition = ""
    if some_builds_are_missing:
        addition = f"({len(build_reports)} of {required_builds} builds are OK)"

    description = f"{ok_groups}/{total_groups} artifact groups are OK {addition}"

    commit = get_commit(gh, pr_info.sha)
    commit.create_status(
        context=build_check_name,
        description=description,
        state=summary_status,
        target_url=url,
    )

    if summary_status == "error":
        sys.exit(1)
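The report walk above only needs the build name embedded in the build_urls_*.json file name, so get_build_name_from_file_name can be a one-liner under that naming scheme (str.removeprefix/removesuffix need Python 3.9+):

def get_build_name_from_file_name(file_name):
    # "build_urls_package_release.json" -> "package_release"
    return file_name.removeprefix("build_urls_").removesuffix(".json")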
Example #19
    return f"docker run --network=host --volume={build_path}:/package_folder" \
           f" --volume={server_log_folder}:/var/log/clickhouse-server" \
           f" --volume={result_folder}:/test_output" \
           f" {docker_image} >{result_folder}/{RESULT_LOG_NAME}"


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
    repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
    reports_path = os.getenv("REPORTS_PATH", "./reports")

    pr_info = PRInfo(get_event())

    gh = Github(get_best_robot_token())

    for root, _, files in os.walk(reports_path):
        for f in files:
            if f == 'changed_images.json':
                images_path = os.path.join(root, 'changed_images.json')
                break

    docker_image = get_image_with_version(reports_path, DOCKER_IMAGE)

    packages_path = os.path.join(temp_path, "packages")
    if not os.path.exists(packages_path):
        os.makedirs(packages_path)
Example #20
    if re.match(
            r"(?i)doc|((non|in|not|un)[-\s]*significant)|(not[ ]*for[ ]*changelog)",
            category,
    ):
        return ""

    if not entry:
        return f"Changelog entry required for category '{category}'"

    return ""


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    pr_info = PRInfo(need_orgs=True, labels_from_api=True)
    can_run, description = should_run_checks_for_pr(pr_info)
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)

    description_report = check_pr_description(pr_info)[:139]
    if description_report:
        print("::notice ::Cannot run, description does not match the template")
        url = (f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/"
               "blob/master/.github/PULL_REQUEST_TEMPLATE.md?plain=1")
        commit.create_status(
            context=NAME,
            description=description_report,
            state="failure",
            target_url=url,
        )
Example #21
from docker_pull_helper import get_image_with_version
from commit_status_helper import post_commit_status, get_commit

NAME = "Docs Check (actions)"

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    temp_path = os.getenv("TEMP_PATH")
    repo_path = os.getenv("REPO_COPY")

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r',
              encoding='utf-8') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event, need_changed_files=True)

    gh = Github(get_best_robot_token())
    if not pr_info.has_changes_in_documentation():
        logging.info("No changes in documentation")
        commit = get_commit(gh, pr_info.sha)
        commit.create_status(context=NAME,
                             description="No changes in docs",
                             state="success")
        sys.exit(0)

    logging.info("Has changes in docs")

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)
Example #22
    Squash statuses to latest state
    1. context="first", state="success", update_time=1
    2. context="second", state="success", update_time=2
    3. context="first", stat="failure", update_time=3
    =========>
    1. context="second", state="success"
    2. context="first", stat="failure"
    """
    filt = {}
    for status in sorted(statuses, key=lambda x: x.updated_at):
        filt[status.context] = status
    return filt


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    pr_info = PRInfo(need_orgs=True)
    gh = Github(get_best_robot_token())
    commit = get_commit(gh, pr_info.sha)

    url = GITHUB_RUN_URL
    statuses = filter_statuses(list(commit.get_statuses()))
    if NAME in statuses and statuses[NAME].state == "pending":
        commit.create_status(
            context=NAME,
            description="All checks finished",
            state="success",
            target_url=url,
        )
Example #23
def main():
    logging.basicConfig(level=logging.INFO)

    build_name = sys.argv[1]

    build_config = CI_CONFIG["build_config"][build_name]

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    pr_info = PRInfo()

    logging.info("Repo copy path %s", REPO_COPY)

    s3_helper = S3Helper()

    version = get_version_from_repo(git=Git(True))
    release_or_pr, performance_pr = get_release_or_pr(pr_info, version)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))
    # FIXME performance
    s3_performance_path = "/".join(
        (performance_pr, pr_info.sha, build_name, "performance.tgz"))

    # If this is a rerun, then we try to find already created artifacts and
    # just put them as a github actions artifact (result)
    check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config)

    docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.string)

    official_flag = pr_info.number == 0
    if "official" in build_config:
        official_flag = build_config["official"]

    version_type = "testing"
    if "release" in pr_info.labels or "release-lts" in pr_info.labels:
        version_type = "stable"
        official_flag = True

    update_version_local(version, version_type)

    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(TEMP_PATH, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")

    logging.info("Will try to fetch cache for our build")
    try:
        get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number,
                                 TEMP_PATH)
    except Exception as e:
        # In case there are issues with ccache, remove the path and do not fail a build
        logging.info("Failed to get ccache, building without it. Error: %s", e)
        rmtree(ccache_path, ignore_errors=True)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    packager_cmd = get_packager_cmd(
        build_config,
        os.path.join(REPO_COPY, "docker/packager"),
        build_output_path,
        version.string,
        image_version,
        ccache_path,
        official_flag,
    )

    logging.info("Going to run packager with %s", packager_cmd)

    logs_path = os.path.join(TEMP_PATH, "build_log")
    if not os.path.exists(logs_path):
        os.makedirs(logs_path)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, logs_path,
                                         build_output_path)
    elapsed = int(time.time() - start)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}",
                          shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}",
                          shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    # Upload the ccache first to have the least build time in case of problems
    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    # FIXME performance
    performance_urls = []
    performance_path = os.path.join(build_output_path, "performance.tgz")
    if os.path.exists(performance_path):
        performance_urls.append(
            s3_helper.upload_build_file_to_s3(performance_path,
                                              s3_performance_path))
        logging.info(
            "Uploaded performance.tgz to %s, now delete to avoid duplication",
            performance_urls[0],
        )
        os.remove(performance_path)

    build_urls = (s3_helper.upload_build_folder_to_s3(
        build_output_path,
        s3_path_prefix,
        keep_dirs_in_s3_path=False,
        upload_symlinks=False,
    ) + performance_urls)
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format("\n".join(build_urls)))

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        log_url = ""
        logging.info("Build log doesn't exist")

    print(f"::notice ::Log URL: {log_url}")

    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, elapsed, success)

    upload_master_static_binaries(pr_info, build_config, s3_helper,
                                  build_output_path)
    # Fail the build job if it did not succeed
    if not success:
        sys.exit(1)
Example #24
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = TEMP_PATH
    repo_path = REPO_COPY
    reports_path = REPORTS_PATH

    check_name = sys.argv[1]
    kill_timeout = int(sys.argv[2])

    flaky_check = 'flaky' in check_name.lower()
    gh = Github(get_best_robot_token())

    pr_info = PRInfo(need_changed_files=flaky_check)

    if 'RUN_BY_HASH_NUM' in os.environ:
        run_by_hash_num = int(os.getenv('RUN_BY_HASH_NUM'))
        run_by_hash_total = int(os.getenv('RUN_BY_HASH_TOTAL'))
        check_name_with_group = check_name + f' [{run_by_hash_num + 1}/{run_by_hash_total}]'
    else:
        run_by_hash_num = 0
        run_by_hash_total = 0
        check_name_with_group = check_name

    rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)
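RUN_BY_HASH_NUM and RUN_BY_HASH_TOTAL split one check into N parallel runs labelled "[k/N]". The runner itself is not shown here, but the intended use is a stable hash that assigns each test to exactly one shard; a sketch of the idea:

import zlib

def belongs_to_shard(test_name, run_by_hash_num, run_by_hash_total):
    if run_by_hash_total == 0:  # sharding disabled
        return True
    return zlib.crc32(test_name.encode()) % run_by_hash_total == run_by_hash_num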
Example #25
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, push)
            test_results.append((manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(os.path.join(args.path, "changed_images.json"), "w") as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
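create_manifest presumably stitches the per-architecture tags produced by the suffixed builds into one multi-arch tag, which is why DOCKER_CLI_EXPERIMENTAL is enabled above. A sketch, assuming tags[0] is the target tag and the rest are the per-arch source tags:

import subprocess

def create_manifest(image, tags, push):
    manifest = f"{image}:{tags[0]}"
    cmd = ["docker", "manifest", "create", "--amend", manifest]
    cmd.extend(f"{image}:{tag}" for tag in tags[1:])
    try:
        subprocess.run(cmd, check=True)
        if push:
            subprocess.run(["docker", "manifest", "push", manifest], check=True)
    except subprocess.CalledProcessError:
        return manifest, "FAIL"
    return manifest, "OK"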
Example #26
    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
    repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
    reports_path = os.getenv("REPORTS_PATH", "./reports")

    check_name = sys.argv[1]
    kill_timeout = int(sys.argv[2])
    flaky_check = 'flaky' in check_name.lower()

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r', encoding='utf-8') as event_file:
        event = json.load(event_file)

    gh = Github(get_best_robot_token())
    pr_info = PRInfo(event, need_changed_files=flaky_check)
    tests_to_run = []
    if flaky_check:
        tests_to_run = get_tests_to_run(pr_info)
        if not tests_to_run:
            commit = get_commit(gh, pr_info.sha)
            commit.create_status(context=check_name, description='Not found changed stateless tests', state='success')
            sys.exit(0)


    image_name = get_image_name(check_name)
    docker_image = get_image_with_version(reports_path, image_name)

    packages_path = os.path.join(temp_path, "packages")
    if not os.path.exists(packages_path):
        os.makedirs(packages_path)
Example #27
def main():
    logging.basicConfig(level=logging.INFO)

    build_check_name = sys.argv[1]
    build_name = sys.argv[2]

    build_config = get_build_config(build_check_name, build_name)

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    pr_info = PRInfo()

    logging.info("Repo copy path %s", REPO_COPY)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    version = get_version_from_repo(REPO_COPY)
    release_or_pr = get_release_or_pr(pr_info, build_config, version)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))

    # If this is a rerun, then we try to find already created artifacts and
    # just put them as a github actions artifact (result)
    build_results = get_build_results_if_exists(s3_helper, s3_path_prefix)
    if build_results is not None and len(build_results) > 0:
        logging.info("Some build results found %s", build_results)
        build_urls = []
        log_url = ""
        for url in build_results:
            if "build_log.log" in url:
                log_url = "https://s3.amazonaws.com/clickhouse-builds/" + url.replace(
                    "+", "%2B").replace(" ", "%20")
            else:
                build_urls.append(
                    "https://s3.amazonaws.com/clickhouse-builds/" +
                    url.replace("+", "%2B").replace(" ", "%20"))
        create_json_artifact(
            TEMP_PATH,
            build_name,
            log_url,
            build_urls,
            build_config,
            0,
            len(build_urls) > 0,
        )
        return

    image_name = get_image_name(build_config)
    docker_image = get_image_with_version(IMAGES_PATH, image_name)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.get_version_string())

    version_type = "testing"
    if "release" in pr_info.labels or "release-lts" in pr_info.labels:
        version_type = "stable"

    update_version_local(REPO_COPY, pr_info.sha, version, version_type)

    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(TEMP_PATH, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")

    logging.info("Will try to fetch cache for our build")
    get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    if build_config["package_type"] == "performance" and pr_info.number != 0:
        # because perf tests store some information about git commits
        subprocess.check_call(
            f"cd {REPO_COPY} && git fetch origin master:master", shell=True)

    packager_cmd = get_packager_cmd(
        build_config,
        os.path.join(REPO_COPY, "docker/packager"),
        build_output_path,
        version.get_version_string(),
        image_version,
        ccache_path,
        pr_info,
    )
    logging.info("Going to run packager with %s", packager_cmd)

    build_clickhouse_log = os.path.join(TEMP_PATH, "build_log")
    if not os.path.exists(build_clickhouse_log):
        os.makedirs(build_clickhouse_log)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, build_clickhouse_log,
                                         build_output_path)
    elapsed = int(time.time() - start)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}",
                          shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}",
                          shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        log_url = ""
        logging.info("Build log doesn't exist")

    build_urls = s3_helper.upload_build_folder_to_s3(
        build_output_path,
        s3_path_prefix,
        keep_dirs_in_s3_path=False,
        upload_symlinks=False,
    )
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format("\n".join(build_urls)))

    print("::notice ::Log URL: {}".format(log_url))

    create_json_artifact(TEMP_PATH, build_name, log_url, build_urls,
                         build_config, elapsed, success)

    upload_master_static_binaries(pr_info, build_config, s3_helper,
                                  build_output_path)
    # Fail the build job if it did not succeed
    if not success:
        sys.exit(1)
Example #28
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    makedirs(TEMP_PATH, exist_ok=True)

    args = parse_args()
    image = DockerImage(args.image_path, args.image_repo, False)
    args.release_type = auto_release_type(args.version, args.release_type)
    tags = gen_tags(args.version, args.release_type)
    NAME = f"Docker image {image.repo} building check"
    pr_info = None
    if CI:
        pr_info = PRInfo()
        release_or_pr, _ = get_release_or_pr(pr_info, args.version)
        args.bucket_prefix = (
            f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}"
        )

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )
        NAME = f"Docker image {image.repo} build and push"

    logging.info("Following tags will be created: %s", ", ".join(tags))
    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
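    # NOTE: the loop variable below shadows the `os` module; this is safe only
    # because nothing after this point in main() uses `os` again.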
    for os in args.os:
        for tag in tags:
            test_results.extend(
                build_and_push_image(
                    image, args.push, args.bucket_prefix, os, tag, args.version
                )
            )
            if test_results[-1][1] != "OK":
                status = "failure"

    pr_info = pr_info or PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    description = f"Processed tags: {', '.join(tags)}"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)
    if status != "success":
        sys.exit(1)
Example #29
    return f"docker run --network=host --volume={build_path}:/package_folder" \
           f" --volume={server_log_folder}:/var/log/clickhouse-server" \
           f" --volume={result_folder}:/test_output" \
           f" {docker_image} >{result_folder}/{RESULT_LOG_NAME}"


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = TEMP_PATH
    repo_path = REPO_COPY
    reports_path = REPORTS_PATH

    pr_info = PRInfo()

    gh = Github(get_best_robot_token())

    rerun_helper = RerunHelper(gh, pr_info, CHECK_NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info(
            "Check is already finished according to github status, exiting")
        sys.exit(0)

    for root, _, files in os.walk(reports_path):
        for f in files:
            if f == 'changed_images.json':
                images_path = os.path.join(root, 'changed_images.json')
                break
Example #30
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH,
                                    f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    repo_path = GITHUB_WORKSPACE

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, repo_path,
                                               "docker/images.json")
    logging.info("Has changed images %s",
                 ", ".join([str(image[0]) for image in changed_images]))
    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
    # The order is important, PR number is used as cache during the build
    versions = [str(pr_info.number), pr_commit_version]
    result_version = pr_commit_version
    if pr_info.number == 0:
        # First get the latest for cache
        versions.insert(0, "latest")

    if args.suffix:
        # We should build architecture specific images separately and merge a
        # manifest later in a different script
        versions = [f"{v}-{args.suffix}" for v in versions]
        # changed_images_{suffix}.json should contain all changed images
        result_version = versions

    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
        images_processing_result += process_single_image(
            versions, full_path, image_name, push)
        result_images[image_name] = result_version

    if changed_images:
        description = "Updated " + ",".join([im[1] for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" +
                      NAME.lower().replace(" ", "_"))
    status, test_results = process_test_results(s3_helper,
                                                images_processing_result,
                                                s3_path_prefix)

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results,
                         [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data",
                                 table="checks",
                                 events=prepared_events)
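process_single_image is the per-image worker in this older revision; its signature suggests one docker build (and optional push) per tag. A sketch under that reading (the result-tuple layout that feeds process_test_results is an assumption):

import subprocess

def process_single_image(versions, path_to_dockerfile_folder, image_name, push):
    results = []
    for version in versions:
        build_ok = subprocess.run(
            ["docker", "build", "-t", f"{image_name}:{version}",
             path_to_dockerfile_folder],
            check=False).returncode == 0
        if build_ok and push:
            build_ok = subprocess.run(
                ["docker", "push", f"{image_name}:{version}"],
                check=False).returncode == 0
        results.append((path_to_dockerfile_folder, image_name, version,
                        "OK" if build_ok else "FAIL"))
    return results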