Example No. 1
def download_builds_filter(check_name, reports_path, result_path, filter_fn=lambda _: True):
    # Resolve the build configuration for this check and turn it into the
    # string key used to look up build artifacts in the reports directory.
    build_config = get_build_config_for_check(check_name)
    print(build_config)
    build_config_str = build_config_to_string(build_config)
    print(build_config_str)
    urls = get_build_urls(build_config_str, reports_path)
    print(urls)

    if not urls:
        raise Exception("No build URLs found")

    # Download every artifact whose URL passes filter_fn into result_path.
    download_builds(result_path, urls, filter_fn)
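
A minimal usage sketch, assuming a hypothetical check name and local paths (none of these values come from the original code); the lambda keeps only URLs ending in ".deb", while the default filter_fn accepts every URL:

download_builds_filter(
    "package_release",      # hypothetical check name
    "/tmp/reports",         # hypothetical reports directory
    "/tmp/downloads",       # hypothetical output directory
    filter_fn=lambda url: url.endswith(".deb"),
)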
Example No. 2
    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r',
              encoding='utf-8') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event)

    gh = Github(get_best_robot_token())

    docker_image = get_image_with_version(temp_path, IMAGE_NAME)

    build_config = get_build_config_for_check(check_name)
    print(build_config)
    build_config_str = build_config_to_string(build_config)
    print(build_config_str)
    urls = get_build_urls(build_config_str, reports_path)
    if not urls:
        raise Exception("No build URLs found")

    # Pick the URL of the standalone clickhouse binary. The `else` branch of a
    # for loop runs only if the loop finished without hitting `break`.
    for url in urls:
        if url.endswith('/clickhouse'):
            build_url = url
            break
    else:
        raise Exception("Cannot find the clickhouse binary among build results")

    logging.info("Got build url %s", build_url)

    workspace_path = os.path.join(temp_path, 'workspace')
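
The URL search in Example No. 2 relies on Python's for/else: the else branch executes only when the loop completes without a break. A standalone sketch of the same pattern, using made-up URLs:

urls = [
    "https://example.com/build/clickhouse-common-static.deb",
    "https://example.com/build/clickhouse",  # the standalone binary
]

for url in urls:
    if url.endswith("/clickhouse"):
        build_url = url
        break
else:
    # Reached only if no URL matched, i.e. the loop never hit `break`.
    raise Exception("Cannot find the clickhouse binary among build results")

print(build_url)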
Example No. 3
    pr_info = PRInfo(event)

    logging.info("Repo copy path %s", repo_path)

    gh = Github(get_best_robot_token())

    image_name = get_image_name(build_config)
    docker_image = get_image_with_version(os.getenv("IMAGES_PATH"), image_name)
    image_version = docker_image.version

    version = get_version_from_repo(repo_path)
    version.tweak_update()
    update_version_local(repo_path, pr_info.sha, version)

    build_name = build_config_to_string(build_config)
    logging.info("Build short name %s", build_name)
    # Export BUILD_NAME to later workflow steps by appending to the $GITHUB_ENV file.
    subprocess.check_call(
        f"echo 'BUILD_NAME=build_urls_{build_name}' >> $GITHUB_ENV",
        shell=True)

    build_output_path = os.path.join(temp_path, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)

    ccache_path = os.path.join(caches_path, build_name + '_ccache')
    s3_helper = S3Helper('https://s3.amazonaws.com')

    logging.info("Will try to fetch cache for our build")
    get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number, temp_path)
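
The echo into $GITHUB_ENV in Example No. 3 follows the GitHub Actions convention: appending NAME=value lines to the file referenced by $GITHUB_ENV makes the variable visible to subsequent workflow steps. A sketch of the same export done from Python without shell=True; the helper name and the example value are illustrative, not part of the original code:

import os

def set_github_env(name, value):
    # Append a NAME=value line to the file pointed to by $GITHUB_ENV
    # (GitHub Actions reads it for later steps). No-op outside of Actions.
    github_env = os.getenv("GITHUB_ENV")
    if not github_env:
        return
    with open(github_env, "a", encoding="utf-8") as env_file:
        env_file.write(f"{name}={value}\n")

set_github_env("BUILD_NAME", "build_urls_package_release")  # hypothetical value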