def __init__(self, url=None):
    if url is None:
        url = get_parameter_from_ssm("clickhouse-test-stat-url")

    self.url = url
    self.auth = {
        "X-ClickHouse-User": get_parameter_from_ssm("clickhouse-test-stat-login"),
        "X-ClickHouse-Key": get_parameter_from_ssm("clickhouse-test-stat-password"),
    }
def __init__(self, url=None, user=None, password=None):
    if url is None:
        url = get_parameter_from_ssm("clickhouse-test-stat-url")

    self.url = url
    self.auth = {
        "X-ClickHouse-User": user
        if user is not None
        else get_parameter_from_ssm("clickhouse-test-stat-login"),
        "X-ClickHouse-Key": password
        if password is not None
        else get_parameter_from_ssm("clickhouse-test-stat-password"),
    }
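# Hypothetical usage sketch for the ClickHouseHelper constructors above (not
# part of the original scripts): with no arguments everything is resolved via
# get_parameter_from_ssm, while the second variant lets CI code inject the URL
# and credentials explicitly. All values below are made up for illustration.
helper = ClickHouseHelper()  # url, user and password all come from SSM

helper_with_creds = ClickHouseHelper(
    url="https://play.example.com:8443",  # hypothetical endpoint
    user="ci_reporter",                   # hypothetical user
    password="not-a-real-password",       # hypothetical secret
)
assert helper_with_creds.auth["X-ClickHouse-User"] == "ci_reporter"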
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.suffix:
        global NAME
        NAME += f" {args.suffix}"
        changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json")
    else:
        changed_json = os.path.join(TEMP_PATH, "changed_images.json")

    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    if os.path.exists(TEMP_PATH):
        shutil.rmtree(TEMP_PATH)
    os.makedirs(TEMP_PATH)

    if args.image_path:
        pr_info = PRInfo()
        pr_info.changed_files = set(i for i in args.image_path)
    else:
        pr_info = PRInfo(need_changed_files=True)

    changed_images = get_changed_docker_images(
        pr_info, GITHUB_WORKSPACE, "docker/images.json"
    )
    logging.info(
        "Has changed images %s", ", ".join([im.path for im in changed_images])
    )

    image_versions, result_version = gen_versions(pr_info, args.suffix)

    result_images = {}
    images_processing_result = []
    for image in changed_images:
        images_processing_result += process_image_with_parents(
            image, image_versions, push
        )
        result_images[image.repo] = result_version

    if changed_images:
        description = "Updated " + ",".join([im.repo for im in changed_images])
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    with open(changed_json, "w", encoding="utf-8") as images_file:
        json.dump(result_images, images_file)

    s3_helper = S3Helper("https://s3.amazonaws.com")

    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
    )
    status, test_results = process_test_results(
        s3_helper, images_processing_result, s3_path_prefix
    )

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if args.no_reports:
        return

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, push)
            test_results.append((manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(os.path.join(args.path, "changed_images.json"), "w") as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
        url_part += '<a href="{}">push_log</a>'.format(push_url)
        if url_part:
            test_name = image + ' (' + url_part + ')'
        else:
            test_name = image
        processed_test_results.append((test_name, status))
    return overall_status, processed_test_results


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    repo_path = os.getenv("GITHUB_WORKSPACE", os.path.abspath("../../"))
    temp_path = os.path.join(
        os.getenv("RUNNER_TEMP", os.path.abspath("./temp")), 'docker_images_check'
    )
    dockerhub_password = get_parameter_from_ssm('dockerhub_robot_password')

    if os.path.exists(temp_path):
        shutil.rmtree(temp_path)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event, False, True)
    changed_images, dockerhub_repo_name = get_changed_docker_images(
        pr_info, repo_path, "docker/images.json"
    )
    logging.info(
        "Has changed images %s", ', '.join([str(image[0]) for image in changed_images])
    )
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    makedirs(TEMP_PATH, exist_ok=True)

    args = parse_args()
    image = DockerImage(args.image_path, args.image_repo, False)
    args.release_type = auto_release_type(args.version, args.release_type)
    tags = gen_tags(args.version, args.release_type)
    NAME = f"Docker image {image.repo} building check"
    pr_info = None
    if CI:
        pr_info = PRInfo()
        release_or_pr, _ = get_release_or_pr(pr_info, args.version)
        args.bucket_prefix = (
            f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}"
        )

    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )
        NAME = f"Docker image {image.repo} build and push"

    logging.info("Following tags will be created: %s", ", ".join(tags))
    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for os in args.os:
        for tag in tags:
            test_results.extend(
                build_and_push_image(
                    image, args.push, args.bucket_prefix, os, tag, args.version
                )
            )
            if test_results[-1][1] != "OK":
                status = "failure"

    pr_info = pr_info or PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    description = f"Processed tags: {', '.join(tags)}"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token(), per_page=100)
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)

    if status != "success":
        sys.exit(1)
    # This check runs separately from other checks because it requires an
    # exclusive run (see .github/workflows/jepsen.yml), so we cannot add an
    # explicit dependency on a build job and instead busy-wait on its results.
    # For the same reason we use the latest docker image.
    build_url = f"https://s3.amazonaws.com/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
    head = requests.head(build_url)
    counter = 0
    while head.status_code != 200:
        time.sleep(10)
        head = requests.head(build_url)
        counter += 1
        if counter >= 180:
            logging.warning("Cannot fetch build in 30 minutes, exiting")
            sys.exit(0)

    with SSHKey(key_value=get_parameter_from_ssm("jepsen_ssh_key") + "\n"):
        ssh_auth_sock = os.environ["SSH_AUTH_SOCK"]
        auth_sock_dir = os.path.dirname(ssh_auth_sock)
        cmd = get_run_command(
            ssh_auth_sock,
            auth_sock_dir,
            pr_info,
            nodes_path,
            REPO_COPY,
            build_url,
            result_path,
            docker_image,
        )
        logging.info("Going to run jepsen: %s", cmd)

        run_log_path = os.path.join(TEMP_PATH, "runlog.log")
from cherry_pick_utils.backport import Backport
from cherry_pick_utils.cherrypick import CherryPick


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    repo_path = os.path.join(os.getenv("GITHUB_WORKSPACE", os.path.abspath("../../")))
    temp_path = os.path.join(os.getenv("TEMP_PATH"))

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    sys.path.append(os.path.join(repo_path, "utils/github"))

    with SSHKey("ROBOT_CLICKHOUSE_SSH_KEY"):
        token = get_parameter_from_ssm("github_robot_token_1")

        bp = Backport(
            token,
            os.environ.get("REPO_OWNER"),
            os.environ.get("REPO_NAME"),
            os.environ.get("REPO_TEAM"),
        )

        def cherrypick_run(token, pr, branch):
            return CherryPick(
                token,
                os.environ.get("REPO_OWNER"),
                os.environ.get("REPO_NAME"),
                os.environ.get("REPO_TEAM"),
                pr,
                branch,
            ).execute(repo_path, False)

        try:
            bp.execute(repo_path, 'origin', None, cherrypick_run)
        except subprocess.CalledProcessError as e:
            logging.error(e.output)
    images_path = os.path.join(temp_path, 'changed_images.json')
    docker_image = 'clickhouse/pvs-test'
    if os.path.exists(images_path):
        logging.info("Images file exists")
        with open(images_path, 'r') as images_fd:
            images = json.load(images_fd)
            logging.info("Got images %s", images)
            if 'clickhouse/pvs-test' in images:
                docker_image += ':' + images['clickhouse/pvs-test']

    logging.info("Got docker image %s", docker_image)

    s3_helper = S3Helper('https://s3.amazonaws.com')
    licence_key = get_parameter_from_ssm('pvs_studio_key')
    cmd = (
        f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) "
        f"--volume={repo_path}:/repo_folder --volume={temp_path}:/test_output "
        f"-e LICENCE_NAME='{LICENCE_NAME}' -e LICENCE_KEY='{licence_key}' "
        f"{docker_image}"
    )
    commit = get_commit(gh, pr_info.sha)

    run_log_path = os.path.join(temp_path, 'run_log.log')

    with TeePopen(cmd, run_log_path) as process:
        retcode = process.wait()
        if retcode != 0:
            logging.info("Run failed")
        else:
            logging.info("Run Ok")

    if retcode != 0:
        commit.create_status(
            context=NAME,
" -e CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_USER_PASSWORD") run_command = get_run_command( result_path, result_path, repo_tests_path, pr_info.number, pr_info.sha, docker_env, docker_image, ) logging.info("Going to run command %s", run_command) popen_env = os.environ.copy() database_url = get_parameter_from_ssm("clickhouse-test-stat-url") database_username = get_parameter_from_ssm("clickhouse-test-stat-login") database_password = get_parameter_from_ssm("clickhouse-test-stat-password") popen_env.update({ "CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_URL": f"{database_url}:9440", "CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_USER": database_username, "CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_USER_PASSWORD": database_password, "CLICKHOUSE_PERFORMANCE_COMPARISON_CHECK_NAME": check_name_with_group, "CLICKHOUSE_PERFORMANCE_COMPARISON_CHECK_NAME_PREFIX": check_name_prefix, })
def main(): logging.basicConfig(level=logging.INFO) stopwatch = Stopwatch() args = parse_args() if args.suffix: global NAME NAME += f" {args.suffix}" changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json") else: changed_json = os.path.join(TEMP_PATH, "changed_images.json") if args.push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg "docker login --username 'robotclickhouse' --password-stdin", input=get_parameter_from_ssm("dockerhub_robot_password"), encoding="utf-8", shell=True, ) if os.path.exists(TEMP_PATH): shutil.rmtree(TEMP_PATH) os.makedirs(TEMP_PATH) images_dict = get_images_dict(GITHUB_WORKSPACE, "docker/images.json") pr_info = PRInfo() if args.all: pr_info.changed_files = set(images_dict.keys()) elif args.image_path: pr_info.changed_files = set(i for i in args.image_path) else: try: pr_info.fetch_changed_files() except TypeError: # If the event does not contain diff, nothing will be built pass changed_images = get_changed_docker_images(pr_info, images_dict) if changed_images: logging.info("Has changed images: %s", ", ".join([im.path for im in changed_images])) image_versions, result_version = gen_versions(pr_info, args.suffix) result_images = {} images_processing_result = [] for image in changed_images: # If we are in backport PR, then pr_info.release_pr is defined # We use it as tag to reduce rebuilding time images_processing_result += process_image_with_parents( image, image_versions, pr_info.release_pr, args.push) result_images[image.repo] = result_version if changed_images: description = "Updated " + ",".join([im.repo for im in changed_images]) else: description = "Nothing to update" if len(description) >= 140: description = description[:136] + "..." with open(changed_json, "w", encoding="utf-8") as images_file: json.dump(result_images, images_file) s3_helper = S3Helper() s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")) status, test_results = process_test_results(s3_helper, images_processing_result, s3_path_prefix) url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME) print(f"::notice ::Report url: {url}") print(f'::set-output name=url_output::"{url}"') if not args.reports: return gh = Github(get_best_robot_token(), per_page=100) post_commit_status(gh, pr_info.sha, NAME, description, status, url) prepared_events = prepare_tests_results_for_clickhouse( pr_info, test_results, status, stopwatch.duration_seconds, stopwatch.start_time_str, url, NAME, ) ch_helper = ClickHouseHelper() ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) if status == "error": sys.exit(1)
def main(): logging.basicConfig(level=logging.INFO) stopwatch = Stopwatch() args = parse_args() if args.suffix: global NAME NAME += f" {args.suffix}" changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json") else: changed_json = os.path.join(TEMP_PATH, "changed_images.json") push = not args.no_push_images if push: subprocess.check_output( # pylint: disable=unexpected-keyword-arg "docker login --username 'robotclickhouse' --password-stdin", input=get_parameter_from_ssm("dockerhub_robot_password"), encoding="utf-8", shell=True, ) repo_path = GITHUB_WORKSPACE if os.path.exists(TEMP_PATH): shutil.rmtree(TEMP_PATH) os.makedirs(TEMP_PATH) if args.image_path: pr_info = PRInfo() pr_info.changed_files = set(i for i in args.image_path) else: pr_info = PRInfo(need_changed_files=True) changed_images = get_changed_docker_images(pr_info, repo_path, "docker/images.json") logging.info("Has changed images %s", ", ".join([str(image[0]) for image in changed_images])) pr_commit_version = str(pr_info.number) + "-" + pr_info.sha # The order is important, PR number is used as cache during the build versions = [str(pr_info.number), pr_commit_version] result_version = pr_commit_version if pr_info.number == 0: # First get the latest for cache versions.insert(0, "latest") if args.suffix: # We should build architecture specific images separately and merge a # manifest lately in a different script versions = [f"{v}-{args.suffix}" for v in versions] # changed_images_{suffix}.json should contain all changed images result_version = versions result_images = {} images_processing_result = [] for rel_path, image_name in changed_images: full_path = os.path.join(repo_path, rel_path) images_processing_result += process_single_image( versions, full_path, image_name, push) result_images[image_name] = result_version if changed_images: description = "Updated " + ",".join([im[1] for im in changed_images]) else: description = "Nothing to update" if len(description) >= 140: description = description[:136] + "..." with open(changed_json, "w") as images_file: json.dump(result_images, images_file) s3_helper = S3Helper("https://s3.amazonaws.com") s3_path_prefix = (str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")) status, test_results = process_test_results(s3_helper, images_processing_result, s3_path_prefix) url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME) print("::notice ::Report url: {}".format(url)) print('::set-output name=url_output::"{}"'.format(url)) if args.no_reports: return gh = Github(get_best_robot_token()) post_commit_status(gh, pr_info.sha, NAME, description, status, url) prepared_events = prepare_tests_results_for_clickhouse( pr_info, test_results, status, stopwatch.duration_seconds, stopwatch.start_time_str, url, NAME, ) ch_helper = ClickHouseHelper() ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)