Example 1
0
def get_third_party_release(
        available_archives: List[MetadataItem],
        compiler_type: str,
        os_type: Optional[str],
        architecture: Optional[str],
        is_linuxbrew: Optional[bool],
        lto: Optional[str]) -> MetadataItem:
    """
    Find the unique third-party release archive matching the given constraints.

    :param available_archives: archive metadata entries to search.
    :param compiler_type: requested compiler type (e.g. 'gcc', 'clang12').
    :param os_type: OS name/version; autodetected from the local system if falsy.
    :param architecture: CPU architecture; autodetected from the local system if falsy.
    :param is_linuxbrew: if not None, require the archive's Linuxbrew flag to match.
    :param lto: LTO type the archive must match (see matches_maybe_empty).
    :return: the single matching MetadataItem.
    :raises ValueError: if no archive, or more than one archive, matches.
    """
    if not os_type:
        os_type = local_sys_conf().short_os_name_and_version()
    if not architecture:
        architecture = local_sys_conf().architecture

    needed_compiler_type = compiler_type

    # First narrow down by compiler type, architecture and LTO type.
    candidates: List[Any] = [
        archive for archive in available_archives
        if compiler_type_matches(archive.compiler_type, needed_compiler_type) and
        archive.architecture == architecture and
        matches_maybe_empty(archive.lto_type, lto)
    ]

    if is_linuxbrew is not None:
        candidates = [
            candidate for candidate in candidates
            if candidate.is_linuxbrew == is_linuxbrew
        ]

    if is_linuxbrew is None or not is_linuxbrew or len(candidates) > 1:
        # If a Linuxbrew archive is requested, we don't have to filter by OS, because archives
        # should be OS-independent. But still do that if we have more than one candidate.
        candidates = filter_for_os(candidates, os_type)

    if len(candidates) == 1:
        return candidates[0]

    if not candidates:
        if compiler_type == 'gcc' and os_type == 'ubuntu18.04':
            logging.info(
                "Assuming that the compiler type of 'gcc' means 'gcc7' on Ubuntu 18.04.")
            return get_third_party_release(
                available_archives, 'gcc7', os_type, architecture, is_linuxbrew, lto=None)

    if candidates:
        # Log every ambiguous candidate before failing (enumerate replaces a manual counter).
        for i, candidate in enumerate(candidates, start=1):
            logging.warning("Third-party release archive candidate #%d: %s", i, candidate)
        wrong_count_str = 'more than one'
    else:
        logging.info(f"Available release archives:\n{to_yaml_str(available_archives)}")
        wrong_count_str = 'no'

    raise ValueError(
        f"Found {wrong_count_str} third-party release archives to download for OS type "
        f"{os_type}, compiler type matching {compiler_type}, architecture {architecture}, "
        f"is_linuxbrew={is_linuxbrew}. See more details above.")
Example 2
0
def get_download_url(metadata: Dict[str, Any], compiler_type: str,
                     os_type: Optional[str],
                     architecture: Optional[str]) -> str:
    """
    Return the download URL of the third-party archive matching the given constraints.

    :param metadata: parsed metadata containing an 'archives' list of dicts.
    :param compiler_type: compiler type the archive must declare (exact match).
    :param os_type: OS name/version; autodetected from the local system if falsy.
    :param architecture: CPU architecture; autodetected from the local system if falsy.
    :return: the full download URL built from the matching archive's tag.
    :raises ValueError: if no archive, or more than one archive, matches.
    """
    if not os_type:
        os_type = local_sys_conf().short_os_name_and_version()
    if not architecture:
        architecture = local_sys_conf().architecture

    available_archives = metadata['archives']
    # Filter by compiler type and architecture first, then narrow down by OS.
    candidates: List[Any] = [
        archive for archive in available_archives
        if archive['compiler_type'] == compiler_type and
        archive['architecture'] == architecture
    ]
    candidates = filter_for_os(candidates, os_type)

    if len(candidates) == 1:
        tag = candidates[0]['tag']
        return f'{DOWNLOAD_URL_PREFIX}{tag}/{get_archive_name_from_tag(tag)}'

    if not candidates:
        # Retry with the concrete GCC version implied by the generic 'gcc' type.
        if os_type == 'centos7' and compiler_type == 'gcc':
            logging.info(
                "Assuming that the compiler type of 'gcc' means 'gcc5'. "
                "This will change when we stop using Linuxbrew and update the compiler."
            )
            return get_download_url(metadata, 'gcc5', os_type, architecture)
        if os_type == 'ubuntu18.04' and compiler_type == 'gcc':
            logging.info(
                "Assuming that the compiler type of 'gcc' means 'gcc7'. "
                "This will change when we stop using Linuxbrew and update the compiler."
            )
            return get_download_url(metadata, 'gcc7', os_type, architecture)

        logging.info(
            f"Available release archives:\n{to_yaml_str(available_archives)}")
        raise ValueError(
            "Could not find a third-party release archive to download for "
            f"OS type '{os_type}', "
            f"compiler type '{compiler_type}', and "
            f"architecture '{architecture}'. "
            "Please see the list of available thirdparty archives above.")

    # More than one candidate: log all of them before failing
    # (enumerate replaces a manual counter).
    for i, candidate in enumerate(candidates, start=1):
        logging.warning("Third-party release archive candidate #%d: %s", i,
                        candidate)

    raise ValueError(
        f"Found too many third-party release archives to download for OS type "
        f"{os_type} and compiler type matching {compiler_type}: {candidates}.")
Example 3
0
def get_toolchain_url(toolchain_type: str) -> str:
    """
    Return the download URL for the given toolchain type on the local system.

    :param toolchain_type: toolchain name, e.g. 'linuxbrew' or an LLVM toolchain.
    :return: the URL for the local OS/architecture combination.
    :raises ValueError: if no URL, or more than one compatible URL, is found.
    """
    if toolchain_type == 'linuxbrew':
        # Does not depend on the OS.
        return LINUXBREW_URL

    os_and_arch_to_url = TOOLCHAIN_TO_OS_AND_ARCH_TO_URL[toolchain_type]
    local_sys_conf = sys_detection.local_sys_conf()
    os_and_arch = local_sys_conf.id_for_packaging()
    if os_and_arch in os_and_arch_to_url:
        toolchain_url = os_and_arch_to_url[os_and_arch]
    else:
        # No exact match: look for compatible OS/architecture combinations.
        os_and_arch_candidates = [
            candidate for candidate in os_and_arch_to_url
            if is_compatible_os_arch_combination(candidate, os_and_arch)
        ]
        err_msg_prefix = (
            f"Toolchain {toolchain_type} not found for OS/architecture combination {os_and_arch}"
        )
        if not os_and_arch_candidates:
            raise ValueError(
                f"{err_msg_prefix}, and no compatible OS/architecture combinations found."
            )
        if len(os_and_arch_candidates) > 1:
            # Bug fix: the second string of this implicit concatenation was missing
            # the f prefix, so the candidate list was never interpolated.
            raise ValueError(
                f"{err_msg_prefix}, and too many compatible OS/architecture combinations "
                f"found, cannot choose automatically: {os_and_arch_candidates}")
        effective_os_and_arch = os_and_arch_candidates[0]
        toolchain_url = os_and_arch_to_url[effective_os_and_arch]
    return toolchain_url
Example 4
0
def set_rpath(file_path: str, rpath: str) -> None:
    """
    Set the rpath of the given file to the given value and verify the result.

    Non-ELF files are silently skipped. Raises ValueError if the rpath read
    back after the change does not match the requested value.
    """
    if not is_elf(file_path):
        return

    os_release: Optional[OsReleaseVars] = local_sys_conf(
    ).linux_os_release
    chrpath_tool: Optional[str] = get_chrpath_path()

    on_amazon_linux = os_release is not None and os_release.id == 'amzn'

    cmd_line: List[str]
    if on_amazon_linux and chrpath_tool is not None:
        # On Amazon Linux, we prefer chrpath, because patchelf version 0.9 installed on Amazon
        # Linux 2 silently fails to set rpath in our experience.
        cmd_line = [chrpath_tool, '-r', rpath, file_path]
    else:
        patchelf_tool = get_patchelf_path()
        assert patchelf_tool is not None
        cmd_line = [patchelf_tool, file_path, '--set-rpath', rpath]

    subprocess.check_call(cmd_line)

    # Read the rpath back to confirm the tool actually applied the change.
    actual_rpath = get_rpath(file_path)
    if actual_rpath != rpath:
        raise ValueError(
            f"Failed to set rpath on file {file_path} to {rpath}: it is now {actual_rpath}"
        )
Example 5
0
def get_compilers(metadata_items: List[MetadataItem], os_type: Optional[str],
                  architecture: Optional[str]) -> list:
    """
    Return the sorted list of distinct compiler types available among the given
    metadata items for the given OS type and architecture.

    :param metadata_items: archive metadata entries to search.
    :param os_type: OS name/version; autodetected from the local system if falsy.
    :param architecture: CPU architecture; autodetected from the local system if falsy.
    """
    if not os_type:
        os_type = local_sys_conf().short_os_name_and_version()
    if not architecture:
        architecture = local_sys_conf().architecture

    candidates: List[MetadataItem] = [
        metadata_item for metadata_item in metadata_items
        if metadata_item.architecture == architecture
    ]
    candidates = filter_for_os(candidates, os_type)

    # Set comprehension deduplicates compiler types; sorted() returns a list.
    return sorted({metadata_item.compiler_type for metadata_item in candidates})
Example 6
0
def get_llvm_url(compiler_type: str) -> Optional[str]:
    """
    Return the LLVM download URL for the given compiler type on the local
    system, or None if no suitable URL is known.

    Raises ValueError if several compatible OS types map to different URLs.
    """
    arch_to_os_map = COMPILER_TYPE_TO_ARCH_TO_OS_TYPE_TO_LLVM_URL.get(compiler_type) or {}
    os_type_to_llvm_url = arch_to_os_map.get(local_sys_conf().architecture)
    if os_type_to_llvm_url is None:
        return None

    os_type = local_sys_conf().short_os_name_and_version()
    # Prefer an exact OS match when one exists.
    try:
        return os_type_to_llvm_url[os_type]
    except KeyError:
        pass

    # Fall back to URLs registered for OS types compatible with the local one.
    candidate_urls = [
        url for os_type_key, url in os_type_to_llvm_url.items()
        if is_compatible_os(os_type_key, os_type)
    ]
    if len(candidate_urls) > 1:
        raise ValueError("Ambiguous LLVM URLs: %s" % candidate_urls)
    if not candidate_urls:
        return None
    return candidate_urls[0]
Example 7
0
def ensure_toolchain_installed(download_manager: DownloadManager,
                               toolchain_type: str) -> Toolchain:
    """
    Download and install the given toolchain if necessary, and return a
    Toolchain object describing it.

    :param download_manager: used to download and unpack the toolchain archive.
    :param toolchain_type: one of TOOLCHAIN_TYPES (e.g. an 'llvm...' type or 'linuxbrew').
    :return: a Toolchain with the URL, type, and installation root directory.
    :raises ValueError: if no (or more than one) compatible OS/arch URL is found.
    :raises RuntimeError: if we don't know the installation parent directory.
    """
    assert toolchain_type in TOOLCHAIN_TYPES, (
        f"Invalid toolchain type: '{toolchain_type}'. Valid types: "
        f"{', '.join(TOOLCHAIN_TYPES)}.")

    os_and_arch_to_url = TOOLCHAIN_TO_OS_AND_ARCH_TO_URL[toolchain_type]
    local_sys_conf = sys_detection.local_sys_conf()
    os_and_arch = local_sys_conf.id_for_packaging()
    if os_and_arch in os_and_arch_to_url:
        toolchain_url = os_and_arch_to_url[os_and_arch]
    else:
        # No exact match: look for compatible OS/architecture combinations.
        os_and_arch_candidates = [
            candidate for candidate in os_and_arch_to_url
            if is_compatible_os_arch_combination(candidate, os_and_arch)
        ]
        err_msg_prefix = (
            f"Toolchain {toolchain_type} not found for OS/architecture combination {os_and_arch}"
        )
        if not os_and_arch_candidates:
            raise ValueError(
                f"{err_msg_prefix}, and no compatible OS/architecture combinations found."
            )
        if len(os_and_arch_candidates) > 1:
            # Bug fix: the second string of this implicit concatenation was missing
            # the f prefix, so the candidate list was never interpolated.
            raise ValueError(
                f"{err_msg_prefix}, and too many compatible OS/architecture combinations "
                f"found, cannot choose automatically: {os_and_arch_candidates}")
        effective_os_and_arch = os_and_arch_candidates[0]
        toolchain_url = os_and_arch_to_url[effective_os_and_arch]

    if toolchain_type.startswith('llvm'):
        parent_dir = '/opt/yb-build/llvm'
    elif toolchain_type == 'linuxbrew':
        parent_dir = '/opt/yb-build/brew'
    else:
        # Bug fix: removed a stray 'f' that appeared inside the message text
        # ("toolchain of type f<toolchain_type>").
        raise RuntimeError(
            f"We don't know where to install toolchain of type {toolchain_type}"
        )

    toolchain_root = download_manager.download_toolchain(
        toolchain_url, parent_dir)

    return Toolchain(toolchain_url=toolchain_url,
                     toolchain_type=toolchain_type,
                     toolchain_root=toolchain_root)
Example 8
0
def get_third_party_release(available_archives: List[MetadataItem],
                            compiler_type: str, os_type: Optional[str],
                            architecture: Optional[str],
                            is_linuxbrew: Optional[bool],
                            lto: Optional[str]) -> MetadataItem:
    """
    Find the unique third-party release archive matching the given constraints.

    :param available_archives: archive metadata entries to search.
    :param compiler_type: requested compiler type (e.g. 'gcc', 'clang12').
    :param os_type: OS name/version; autodetected from the local system if falsy.
    :param architecture: CPU architecture; autodetected from the local system if falsy.
    :param is_linuxbrew: if not None, require the archive's Linuxbrew flag to match.
    :param lto: LTO type the archive must match (see matches_maybe_empty).
    :return: the single matching MetadataItem.
    :raises ValueError: if no archive, or more than one archive, matches.
    """
    if not os_type:
        os_type = local_sys_conf().short_os_name_and_version()
    if not architecture:
        architecture = local_sys_conf().architecture

    needed_compiler_type = compiler_type
    if compiler_type == 'gcc11' and os_type == 'almalinux8' and architecture == 'x86_64':
        # A temporary workaround for https://github.com/yugabyte/yugabyte-db/issues/12429
        # Strictly speaking, we don't have to build third-party dependencies with the same compiler
        # as we build YugabyteDB code with, unless we want to use advanced features like LTO where
        # the "bitcode" format files of object files being linked need to match, and they may differ
        # across different compiler versions. Another potential issue is libstdc++ versioning.
        # We don't want to end up linking two different versions of libstdc++.
        #
        # TODO: use a third-party archive built with GCC 10 (and ideally with GCC 11 itself).
        needed_compiler_type = 'gcc9'

    # First narrow down by compiler type, architecture and LTO type.
    candidates: List[Any] = [
        archive for archive in available_archives
        if compiler_type_matches(archive.compiler_type, needed_compiler_type)
        and archive.architecture == architecture
        and matches_maybe_empty(archive.lto_type, lto)
    ]

    if is_linuxbrew is not None:
        candidates = [
            candidate for candidate in candidates
            if candidate.is_linuxbrew == is_linuxbrew
        ]

    if is_linuxbrew is None or not is_linuxbrew or len(candidates) > 1:
        # If a Linuxbrew archive is requested, we don't have to filter by OS, because archives
        # should be OS-independent. But still do that if we have more than one candidate.
        candidates = filter_for_os(candidates, os_type)

    if len(candidates) == 1:
        return candidates[0]

    if not candidates:
        if compiler_type == 'gcc' and os_type == 'ubuntu18.04':
            logging.info(
                "Assuming that the compiler type of 'gcc' means 'gcc7' on Ubuntu 18.04."
            )
            return get_third_party_release(available_archives,
                                           'gcc7',
                                           os_type,
                                           architecture,
                                           is_linuxbrew,
                                           lto=None)

    if candidates:
        # Log every ambiguous candidate before failing (enumerate replaces a manual counter).
        for i, candidate in enumerate(candidates, start=1):
            logging.warning("Third-party release archive candidate #%d: %s", i,
                            candidate)
        wrong_count_str = 'more than one'
    else:
        logging.info(
            f"Available release archives:\n{to_yaml_str(available_archives)}")
        wrong_count_str = 'no'

    raise ValueError(
        f"Found {wrong_count_str} third-party release archives to download for OS type "
        f"{os_type}, compiler type matching {compiler_type}, architecture {architecture}, "
        f"is_linuxbrew={is_linuxbrew}. See more details above.")
def parse_cmd_line_args() -> argparse.Namespace:
    """
    Parse and validate command-line arguments for the third-party dependency
    build script.

    Besides parsing, this performs cross-argument validation (mutually
    exclusive options, remote-build option pairing) and normalizes some
    values, e.g. forcing the single compiler type to 'clang' on macOS and
    to 'gcc' when --devtoolset is used locally.

    :return: the parsed and validated argparse namespace.
    :raises ValueError: if an invalid combination of arguments is given.
    """
    parser = argparse.ArgumentParser(prog=sys.argv[0])
    parser.add_argument(
        '--build-type',
        default=None,
        type=str,
        choices=BUILD_TYPES,
        help='Build only specific part of thirdparty dependencies.')
    parser.add_argument(
        '--skip-sanitizers',
        action='store_true',
        help='Do not build ASAN and TSAN instrumented dependencies.')
    parser.add_argument('--clean',
                        action='store_true',
                        default=False,
                        help='Clean, but keep downloads.')
    parser.add_argument('--clean-downloads',
                        action='store_true',
                        default=False,
                        help='Clean, including downloads.')
    parser.add_argument('--add-checksum',
                        help='Compute and add unknown checksums to %s' %
                        CHECKSUM_FILE_NAME,
                        action='store_true')
    parser.add_argument('--skip', help='Dependencies to skip')

    parser.add_argument(
        '--single-compiler-type',
        type=str,
        choices=['gcc', 'clang'],
        default=None,
        help=
        'Produce a third-party dependencies build using only a single compiler. '
        'This also implies that we are not using Linuxbrew.')

    parser.add_argument(
        '--compiler-prefix',
        type=str,
        help=
        'The prefix directory for looking for compiler executables. We will look for '
        'compiler executable in the bin subdirectory of this directory.')

    parser.add_argument(
        '--compiler-suffix',
        type=str,
        default='',
        help=
        'Suffix to append to compiler executables, such as the version number, '
        'potentially prefixed with a dash, to obtain names such as gcc-8, g++-8, '
        'clang-10, or clang++-10.')

    parser.add_argument('--devtoolset',
                        type=int,
                        help='Specifies a CentOS devtoolset')

    parser.add_argument(
        '-j',
        '--make-parallelism',
        help='How many cores should the build use. This is passed to '
        'Make/Ninja child processes. This can also be specified using the '
        'YB_MAKE_PARALLELISM environment variable.',
        type=int)

    parser.add_argument('--use-ccache',
                        action='store_true',
                        help='Use ccache to speed up compilation')

    parser.add_argument(
        '--use-compiler-wrapper',
        action='store_true',
        help='Use a compiler wrapper script. Allows additional validation but '
        'makes the build slower.')

    parser.add_argument(
        '--llvm-version',
        default=None,
        help='Version (tag) to use for dependencies based on LLVM codebase')

    parser.add_argument(
        '--remote-build-server',
        help=
        'Build third-party dependencies remotely on this server. The default value is '
        'determined by YB_THIRDPARTY_REMOTE_BUILD_SERVER environment variable.',
        default=os.getenv('YB_THIRDPARTY_REMOTE_BUILD_SERVER'))

    parser.add_argument(
        '--remote-build-dir',
        help=
        'The directory on the remote server to build third-party dependencies in. The '
        'value is determined by the YB_THIRDPARTY_REMOTE_BUILD_DIR environment variable.',
        default=os.getenv('YB_THIRDPARTY_REMOTE_BUILD_DIR'))

    parser.add_argument(
        '--local',
        help=
        'Forces the local build even if --remote-... options are specified or the '
        'corresponding environment variables are set.',
        action='store_true')

    parser.add_argument(
        '--download-extract-only',
        help=
        'Only download and extract archives. Do not build any dependencies.',
        action='store_true')

    parser.add_argument('--license-report',
                        action='store_true',
                        help='Generate a license report.')

    parser.add_argument(
        '--toolchain',
        help='Automatically download, install and use the given toolchain',
        choices=TOOLCHAIN_TYPES)

    parser.add_argument('--create-package',
                        help='Create the package tarball',
                        action='store_true')

    parser.add_argument(
        '--upload-as-tag',
        help='Upload the package tarball as a GitHub release under this tag. '
        'Implies --create-package. Requires GITHUB_TOKEN to be set. If GITHUB_TOKEN is not '
        'set, this is a no-op (with success exit code).')

    parser.add_argument(
        '--expected-major-compiler-version',
        type=int,
        help='Expect the major version of the compiler to be as specified')

    parser.add_argument('--verbose',
                        help='Show verbose output',
                        action='store_true')

    parser.add_argument('dependencies',
                        nargs=argparse.REMAINDER,
                        help='Dependencies to build.')

    parser.add_argument(
        '--enforce_arch',
        help=
        'Ensure that we use the given architecture, such as arm64. Useful for macOS systems '
        'with Apple Silicon CPUs and Rosetta 2 installed that can switch between '
        'architectures.')

    parser.add_argument(
        '--force',
        help=
        'Build dependencies even though the system does not detect any changes compared '
        'to an earlier completed build.',
        action='store_true')

    parser.add_argument(
        '--delete-build-dir',
        help=
        "Delete each dependency's build directory to start each build from scratch. "
        "Note that this does not affect the corresponding source directory.",
        action='store_true')

    args = parser.parse_args()

    # ---------------------------------------------------------------------------------------------
    # Validating arguments
    # ---------------------------------------------------------------------------------------------

    if args.dependencies and args.skip:
        raise ValueError(
            "--skip is not compatible with specifying a list of dependencies to build"
        )

    if is_macos():
        # On macOS the only supported compiler is clang, so normalize to it.
        if args.single_compiler_type not in [None, 'clang']:
            raise ValueError(
                "--single-compiler-type=%s is not allowed on macOS" %
                args.single_compiler_type)
        args.single_compiler_type = 'clang'

    if args.local and (args.remote_build_server is not None
                       or args.remote_build_dir is not None):
        # --local overrides any remote-build settings, including the env-var defaults.
        log("Forcing a local build")
        args.remote_build_server = None
        args.remote_build_dir = None

    # The two remote-build options only make sense together.
    if (args.remote_build_server is None) != (args.remote_build_dir is None):
        raise ValueError(
            '--remote-build-server and --remote-build-dir have to be specified or unspecified '
            'at the same time. Note that their default values are provided by corresponding '
            'environment variables, YB_THIRDPARTY_REMOTE_BUILD_SERVER and '
            'YB_THIRDPARTY_REMOTE_BUILD_DIR.')
    if args.remote_build_dir is not None:
        assert os.path.isabs(args.remote_build_dir), (
            'Remote build directory path must be an absolute path: %s' %
            args.remote_build_dir)

    is_remote_build = args.remote_build_server is not None

    if args.devtoolset is not None and not is_remote_build:
        # Devtoolsets exist only on Red Hat family systems and imply GCC.
        if not local_sys_conf().is_redhat_family():
            raise ValueError(
                "--devtoolset can only be used on Red Hat Enterprise Linux OS family"
            )
        if args.single_compiler_type not in [None, 'gcc']:
            raise ValueError(
                "--devtoolset is not compatible with compiler type: %s" %
                args.single_compiler_type)
        args.single_compiler_type = 'gcc'

    if args.toolchain:
        # An auto-installed toolchain fully determines the compiler, so manual
        # compiler-selection options conflict with it.
        if args.devtoolset:
            raise ValueError("--devtoolset and --toolchain are incompatible")

        if args.compiler_prefix:
            raise ValueError(
                "--compiler-prefix and --toolchain are incompatible")

        if args.compiler_suffix:
            raise ValueError(
                "--compiler-suffix and --toolchain are incompatible")

    if args.enforce_arch and platform.machine() != args.enforce_arch:
        raise ValueError("Machine architecture is %s but we expect %s" %
                         (platform.machine(), args.enforce_arch))

    return args