def test_get_changed_docker_images(self):
    pr_info = PRInfo(PRInfo.default_event.copy())
    pr_info.changed_files = {
        "docker/test/stateless",
        "docker/test/base",
        "docker/docs/builder",
    }
    images = sorted(
        list(di.get_changed_docker_images(pr_info, "/", self.docker_images_path))
    )
    self.maxDiff = None
    expected = sorted(
        [
            di.DockerImage("docker/test/base", "clickhouse/test-base"),
            di.DockerImage("docker/docs/builder", "clickhouse/docs-builder"),
            di.DockerImage(
                "docker/test/stateless",
                "clickhouse/stateless-test",
                "clickhouse/test-base",
            ),
            di.DockerImage(
                "docker/test/integration/base",
                "clickhouse/integration-test",
                "clickhouse/test-base",
            ),
            di.DockerImage(
                "docker/test/fuzzer", "clickhouse/fuzzer", "clickhouse/test-base"
            ),
            di.DockerImage(
                "docker/test/keeper-jepsen",
                "clickhouse/keeper-jepsen-test",
                "clickhouse/test-base",
            ),
            di.DockerImage(
                "docker/docs/check",
                "clickhouse/docs-check",
                "clickhouse/docs-builder",
            ),
            di.DockerImage(
                "docker/docs/release",
                "clickhouse/docs-release",
                "clickhouse/docs-builder",
            ),
            di.DockerImage(
                "docker/test/stateful",
                "clickhouse/stateful-test",
                "clickhouse/stateless-test",
            ),
            di.DockerImage(
                "docker/test/unit",
                "clickhouse/unit-test",
                "clickhouse/stateless-test",
            ),
            di.DockerImage(
                "docker/test/stress",
                "clickhouse/stress-test",
                "clickhouse/stateful-test",
            ),
        ]
    )
    self.assertEqual(images, expected)
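# The test above compares sorted lists of di.DockerImage objects, so the class needs
# equality and a total order over its fields. A minimal sketch of such a class, assuming
# only the attributes the test and the main() variants below actually use (path, repo,
# optional parent); the real class in the di module may carry extra fields and a
# dependency-aware ordering that builds parents first:
from dataclasses import dataclass
from typing import Optional


@dataclass(order=True)
class DockerImage:
    path: str  # path to the Dockerfile directory inside the repo
    repo: str  # target image name, e.g. "clickhouse/test-base"
    parent: Optional[str] = None  # repo of the image this one is built FROM, if any
    # Note: order=True sorts lexicographically by (path, repo, parent); since every
    # path in the test is unique, the Optional parent field is never compared.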
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(
        pr_info, GITHUB_WORKSPACE, "docker/images.json"
    )
    logging.info(
        "Has changed images %s", ", ".join([im.path for im in changed_images])
    )

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        images_processing_result += process_image_with_parents(
            image, image_versions, push
        )
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
    )
    status, test_results = process_test_results(
        s3_helper, images_processing_result, s3_path_prefix
    )

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    images_dict = get_images_dict(GITHUB_WORKSPACE, "docker/images.json")

    pr_info = PRInfo()
    if args.all:
        pr_info.changed_files = set(images_dict.keys())
    elif args.image_path:
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        try:
            pr_info.fetch_changed_files()
        except TypeError:
            # If the event does not contain diff, nothing will be built
            pass

    changed_images = get_changed_docker_images(pr_info, images_dict)
    if changed_images:
        logging.info(
            "Has changed images: %s", ", ".join([im.path for im in changed_images])
        )

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        # If we are in backport PR, then pr_info.release_pr is defined
        # We use it as tag to reduce rebuilding time
        images_processing_result += process_image_with_parents(
            image, image_versions, pr_info.release_pr, args.push
        )
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper()

    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
    )
    status, test_results = process_test_results(
        s3_helper, images_processing_result, s3_path_prefix
    )

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)

    if status == "error":
        sys.exit(1)
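# The variant above reads args.push / args.reports / args.all, whereas the other two
# variants read args.no_push_images / args.no_reports directly. A hedged sketch of an
# argument parser that would satisfy this variant; the flag names, defaults, and help
# texts are assumptions inferred from the attributes used, not the project's actual
# parse_args():
import argparse


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Rebuild changed docker images")
    parser.add_argument("--suffix", type=str, default="", help="architecture suffix, e.g. amd64")
    parser.add_argument("--image-path", action="append", default=[], help="rebuild only the given image path(s)")
    parser.add_argument("--all", action="store_true", help="rebuild every image from docker/images.json")
    parser.add_argument(
        "--no-push-images", dest="push", action="store_false", default=True,
        help="do not push built images to the registry",
    )
    parser.add_argument(
        "--no-reports", dest="reports", action="store_false", default=True,
        help="do not post commit status or upload reports",
    )
    return parser.parse_args()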
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    repo_path = GITHUB_WORKSPACE

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(pr_info, repo_path, "docker/images.json")
    logging.info(
        "Has changed images %s", ", ".join([str(image[0]) for image in changed_images])
    )

    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
    # The order is important, PR number is used as cache during the build
    versions = [str(pr_info.number), pr_commit_version]
    result_version = pr_commit_version
    if pr_info.number == 0:
        # First get the latest for cache
        versions.insert(0, "latest")

    if args.suffix:
        # We should build architecture-specific images separately and merge a
        # manifest later in a different script
        versions = [f"{v}-{args.suffix}" for v in versions]
        # changed_images_{suffix}.json should contain all changed images
        result_version = versions

    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
        images_processing_result += process_single_image(
            versions, full_path, image_name, push
        )
        result_images[image_name] = result_version

    if changed_images:
        description = "Updated " + ",".join([im[1] for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
    )
    status, test_results = process_test_results(
        s3_helper, images_processing_result, s3_path_prefix
    )

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
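# The other two main() variants call gen_versions(pr_info, args.suffix) instead of the
# inline tag computation in the variant above. A sketch of such a helper, factored
# directly out of that inline block; the real helper's exact signature and return types
# are assumptions:
from typing import List, Tuple, Union


def gen_versions(pr_info, suffix: str) -> Tuple[List[str], Union[str, List[str]]]:
    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
    # The order is important: the PR-number tag is reused as a cache source during the build
    versions = [str(pr_info.number), pr_commit_version]
    result_version: Union[str, List[str]] = pr_commit_version
    if pr_info.number == 0:
        # On master (PR number 0), pull "latest" first to warm the cache
        versions.insert(0, "latest")
    if suffix:
        # Architecture-specific images are built separately and merged into a
        # manifest later by a different script
        versions = [f"{v}-{suffix}" for v in versions]
        # changed_images_{suffix}.json should then contain all suffixed tags
        result_version = versions
    return versions, result_version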