Example #1
    def create_build_dir_and_prepare(self, dep: Dependency) -> str:
        src_dir = self.fs_layout.get_source_path(dep)
        if not os.path.isdir(src_dir):
            fatal("Directory '{}' does not exist".format(src_dir))

        build_dir = self.fs_layout.get_build_dir_for_dependency(dep, self.build_type)
        mkdir_if_missing(build_dir)

        if dep.copy_sources:
            log("Bootstrapping %s from %s", build_dir, src_dir)
            # The trailing slash makes rsync copy the contents of src_dir into
            # build_dir, rather than creating a src_dir subdirectory inside it.
            subprocess.check_call(['rsync', '-a', src_dir + '/', build_dir])
        return build_dir
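
For systems without rsync, a minimal standard-library sketch of the same bootstrap step (the helper name bootstrap_build_dir is hypothetical; dirs_exist_ok needs Python 3.8+):

import shutil

def bootstrap_build_dir(src_dir: str, build_dir: str) -> None:
    # Roughly 'rsync -a src_dir/ build_dir': copy the contents of src_dir
    # into build_dir, preserving symlinks and file metadata.
    shutil.copytree(src_dir, build_dir, symlinks=True, dirs_exist_ok=True)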
Example #2
    def prepare_out_dirs(self) -> None:
        build_types = self.get_build_types()
        dirs = [
            os.path.join(self.fs_layout.tp_installed_dir, build_type) for build_type in build_types
        ]
        libcxx_dirs = [os.path.join(dir, 'libcxx') for dir in dirs]
        for dir in dirs + libcxx_dirs:
            lib_dir = os.path.join(dir, 'lib')
            mkdir_if_missing(lib_dir)
            mkdir_if_missing(os.path.join(dir, 'include'))
            # On some systems, autotools installs libraries to lib64 rather than lib. Fix
            # this by setting up lib64 as a symlink to lib. We have to do this step first
            # to handle cases where one third-party library depends on another. Make sure
            # we create a relative symlink so that the entire PREFIX_DIR can be moved,
            # e.g. after it is packaged and then downloaded on a different build node.
            lib64_dir = os.path.join(dir, 'lib64')
            if os.path.exists(lib64_dir):
                if os.path.islink(lib64_dir):
                    continue
                remove_path(lib64_dir)
            os.symlink('lib', lib64_dir)
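
Because the symlink target is the bare string 'lib' rather than an absolute path, the link keeps working after the whole prefix directory is relocated. A small self-contained demonstration of this relative-symlink idiom (all paths here are illustrative):

import os
import shutil
import tempfile

root = tempfile.mkdtemp()
prefix = os.path.join(root, 'prefix')
os.makedirs(os.path.join(prefix, 'lib'))
os.symlink('lib', os.path.join(prefix, 'lib64'))  # relative target

# Move the entire prefix; the relative link still resolves afterwards.
moved_prefix = os.path.join(root, 'moved-prefix')
shutil.move(prefix, moved_prefix)
assert os.path.isdir(os.path.join(moved_prefix, 'lib64'))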
Example #3
    def __init__(self, conf_name_pattern: str) -> None:
        self.common_conf = MultiBuildCommonConf()
        latest_link_path = os.path.join(self.common_conf.dir_of_all_runs,
                                        'latest')

        mkdir_if_missing(os.path.dirname(self.common_conf.checkout_dir))
        if os.path.lexists(latest_link_path):
            # Use lexists() so that a dangling 'latest' symlink is also removed;
            # exists() would miss it and the os.symlink() below would then fail.
            os.remove(latest_link_path)
        os.symlink(os.path.basename(self.common_conf.root_run_dir),
                   latest_link_path)
        copy_code_to(self.common_conf.checkout_dir)

        with open(CIRCLECI_CONFIG_PATH) as circleci_conf_file:
            circleci_conf = yaml.load(circleci_conf_file,
                                      Loader=yaml.SafeLoader)

        self.configurations = []

        for circleci_job in circleci_conf['workflows']['build-release'][
                'jobs']:
            build_params = circleci_job['build']
            conf = BuildConfiguration(
                common_conf=self.common_conf,
                name=build_params['name'],
                docker_image=build_params['docker_image'],
                archive_name_suffix=build_params['archive_name_suffix'],
                build_thirdparty_args=build_params.get('build_thirdparty_args',
                                                       ''))
            if not conf_name_pattern:
                self.configurations.append(conf)
                continue
            if not fnmatch.fnmatch(conf.name, conf_name_pattern):
                log("Skipping configuration '%s' (does not match pattern %s)",
                    conf.name, conf_name_pattern)
                continue
            self.configurations.append(conf)

        for conf in self.configurations:
            log("Will build configuration: %s", conf.name)
Example #4
    def build_with_cmake(self,
                         dep: Dependency,
                         extra_args: List[str] = [],
                         use_ninja_if_available: bool = True,
                         src_subdir_name: Optional[str] = None,
                         extra_build_tool_args: List[str] = [],
                         should_install: bool = True,
                         install_targets: List[str] = ['install'],
                         shared_and_static: bool = False) -> None:
        build_tool = 'make'
        if use_ninja_if_available:
            ninja_available = is_ninja_available()
            log('Ninja is %s',
                'available' if ninja_available else 'unavailable')
            if ninja_available:
                build_tool = 'ninja'

        log("Building dependency %s using CMake. Build tool: %s", dep,
            build_tool)
        log_prefix = self.log_prefix(dep)
        os.environ["YB_REMOTE_COMPILATION"] = "0"

        remove_path('CMakeCache.txt')
        remove_path('CMakeFiles')

        src_path = self.fs_layout.get_source_path(dep)
        if src_subdir_name is not None:
            src_path = os.path.join(src_path, src_subdir_name)

        args = ['cmake', src_path]
        if build_tool == 'ninja':
            args += ['-G', 'Ninja']
        args += self.get_common_cmake_flag_args(dep)
        if extra_args is not None:
            args += extra_args
        args += dep.get_additional_cmake_args(self)

        if shared_and_static and any(
                arg.startswith('-DBUILD_SHARED_LIBS=') for arg in args):
            raise ValueError(
                "shared_and_static=True is specified but CMake arguments already mention "
                "-DBUILD_SHARED_LIBS: %s" % args)

        if '-DBUILD_SHARED_LIBS=OFF' not in args and not shared_and_static:
            # TODO: a better approach for setting CMake arguments from multiple places.
            args.append('-DBUILD_SHARED_LIBS=ON')

        def build_internal(even_more_cmake_args: List[str] = []) -> None:
            final_cmake_args = args + even_more_cmake_args
            log("CMake command line (one argument per line):\n%s" %
                "\n".join([(" " * 4 + sanitize_flags_line_for_log(line))
                           for line in final_cmake_args]))
            log_output(log_prefix, final_cmake_args)

            if build_tool == 'ninja':
                dep.postprocess_ninja_build_file(self, 'build.ninja')

            build_tool_cmd = [
                build_tool, '-j{}'.format(get_make_parallelism())
            ] + extra_build_tool_args

            log_output(log_prefix, build_tool_cmd)

            if should_install:
                log_output(log_prefix, [build_tool] + install_targets)

            with open('compile_commands.json') as compile_commands_file:
                compile_commands = json.load(compile_commands_file)

            for command_item in compile_commands:
                command_args = command_item['command'].split()
                if self.build_type == BUILD_TYPE_ASAN:
                    assert_list_contains(command_args, '-fsanitize=address')
                    assert_list_contains(command_args, '-fsanitize=undefined')
                if self.build_type == BUILD_TYPE_TSAN:
                    assert_list_contains(command_args, '-fsanitize=thread')

        if shared_and_static:
            for build_shared_libs_value, subdir_name in (('ON', 'shared'),
                                                         ('OFF', 'static')):
                build_dir = os.path.join(os.getcwd(), subdir_name)
                mkdir_if_missing(build_dir)
                build_shared_libs_cmake_arg = '-DBUILD_SHARED_LIBS=%s' % build_shared_libs_value
                log(
                    "Building dependency '%s' for build type '%s' with option: %s",
                    dep.name, self.build_type, build_shared_libs_cmake_arg)
                with PushDir(build_dir):
                    build_internal([build_shared_libs_cmake_arg])
        else:
            build_internal()
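
PushDir is a helper from the surrounding codebase that is not shown in these examples. Assuming it behaves like a typical directory-pushing context manager, a minimal sketch might look like this (the real implementation may differ):

import os
from contextlib import contextmanager
from typing import Iterator

@contextmanager
def push_dir(path: str) -> Iterator[None]:
    # Temporarily switch the working directory, restoring the previous one
    # on exit even if the body raises an exception.
    prev_dir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(prev_dir)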
Example #5
    def prepare(self) -> None:
        self.conf_run_dir = os.path.join(self.common_conf.root_run_dir,
                                         self.name)
        self.output_file_path = os.path.join(self.conf_run_dir, 'output.log')
        mkdir_if_missing(self.conf_run_dir)

    def download_toolchain(
            self,
            toolchain_url: str,
            dest_parent_dir: str) -> str:
        """
        Download a C/C++ compiler toolchain, e.g. Linuxbrew GCC 5.5, or LLVM. Returns the directory
        where the toolchain is installed.
        """
        parsed_url = urlparse(toolchain_url)
        file_name = os.path.basename(parsed_url.path)

        dest_dir_name, archive_extension = split_archive_file_name(file_name)
        assert archive_extension.startswith('.'), \
            "Expected the archive extension to start with a dot, got: '%s'. URL: %s" % (
                archive_extension, toolchain_url
            )

        toolchain_dest_dir_path = os.path.join(dest_parent_dir, dest_dir_name)
        if os.path.exists(toolchain_dest_dir_path):
            log(f"Toolchain directory '{toolchain_dest_dir_path}' already exists, not downloading "
                f"URL {toolchain_url}")
            return toolchain_dest_dir_path

        mkdir_if_missing(dest_parent_dir)

        tmp_suffix = ".tmp-%s" % get_temporal_randomized_file_name_suffix()

        archive_temporary_dest_path = os.path.join(
            dest_parent_dir,
            "".join([
                dest_dir_name,
                tmp_suffix,
                archive_extension
            ])
        )
        checksum_suffix = '.sha256'
        archive_temporary_dest_checksum_path = archive_temporary_dest_path + checksum_suffix

        try:
            self.ensure_file_downloaded(
                toolchain_url + checksum_suffix,
                archive_temporary_dest_checksum_path,
                enable_using_alternative_url=False,
                verify_checksum=False)
            with open(archive_temporary_dest_checksum_path) as checksum_file:
                expected_checksum = checksum_file.read().strip().split()[0]

            self.ensure_file_downloaded(
                toolchain_url,
                archive_temporary_dest_path,
                enable_using_alternative_url=False,
                expected_checksum=expected_checksum)

            if dest_dir_name.startswith('linuxbrew'):
                dest_dir_name_tmp = dest_dir_name + tmp_suffix
                self.extract_archive(
                    archive_file_name=archive_temporary_dest_path,
                    out_dir=dest_parent_dir,
                    out_name=dest_dir_name_tmp
                )
                orig_brew_home = read_file(
                    os.path.join(dest_parent_dir, dest_dir_name_tmp, 'ORIG_BREW_HOME')
                ).strip()
                os.rename(os.path.join(dest_parent_dir, dest_dir_name_tmp), orig_brew_home)
                os.symlink(os.path.basename(orig_brew_home), toolchain_dest_dir_path)
            else:
                self.extract_archive(
                    archive_file_name=archive_temporary_dest_path,
                    out_dir=dest_parent_dir,
                    out_name=dest_dir_name
                )

            if not os.path.isdir(toolchain_dest_dir_path):
                raise RuntimeError(
                    f"Extracting the archive downloaded from {toolchain_url} did not create "
                    f"directory '{toolchain_dest_dir_path}'.")

        finally:
            for path_to_remove in [
                archive_temporary_dest_path,
                archive_temporary_dest_checksum_path
            ]:
                if os.path.exists(path_to_remove):
                    log("Removing temporary file '%s'", path_to_remove)
                    os.remove(path_to_remove)
        return toolchain_dest_dir_path
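
The '.sha256' sidecar files read above follow the sha256sum output format, a hex digest followed by whitespace and a file name, which is why only the first whitespace-separated token is used. A minimal standard-library sketch of verifying a download against such a file (the function name is hypothetical):

import hashlib

def sha256_matches(file_path: str, checksum_file_path: str) -> bool:
    # The first whitespace-separated token is the hex digest ('sha256sum' format).
    with open(checksum_file_path) as checksum_file:
        expected_checksum = checksum_file.read().strip().split()[0]
    digest = hashlib.sha256()
    with open(file_path, 'rb') as input_file:
        for chunk in iter(lambda: input_file.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected_checksum
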
    def download_dependency(
            self,
            dep: Dependency,
            src_path: str,
            archive_path: Optional[str]) -> None:
        patch_level_path = os.path.join(src_path, 'patchlevel-{}'.format(dep.patch_version))
        if os.path.exists(patch_level_path):
            return

        download_url = dep.download_url
        assert download_url is not None, "Download URL not specified for dependency %s" % dep.name

        remove_path(src_path)

        # If download_url is "mkdir", we just create an empty directory with the specified name.
        if download_url != 'mkdir':
            if archive_path is None:
                return
            self.ensure_file_downloaded(
                url=download_url,
                file_path=archive_path,
                enable_using_alternative_url=True)
            self.extract_archive(archive_path,
                                 os.path.dirname(src_path),
                                 os.path.basename(src_path))
        else:
            log("Creating %s", src_path)
            mkdir_if_missing(src_path)

        if hasattr(dep, 'extra_downloads'):
            for extra in dep.extra_downloads:
                assert extra.archive_name is not None
                archive_path = os.path.join(self.download_dir, extra.archive_name)
                log("Downloading %s from %s", extra.archive_name, extra.download_url)
                self.ensure_file_downloaded(
                    url=extra.download_url,
                    file_path=archive_path,
                    enable_using_alternative_url=True)
                output_path = os.path.join(src_path, extra.dir_name)
                self.extract_archive(archive_path, output_path)
                if extra.post_exec is not None:
                    with PushDir(output_path):
                        assert isinstance(extra.post_exec, list)
                        if isinstance(extra.post_exec[0], str):
                            subprocess.check_call(cast(List[str], extra.post_exec))
                        else:
                            for command in extra.post_exec:
                                subprocess.check_call(command)

        if hasattr(dep, 'patches'):
            with PushDir(src_path):
                for patch in dep.patches:
                    log("Applying patch: %s", patch)
                    process = subprocess.Popen(['patch', '-p{}'.format(dep.patch_strip)],
                                               stdin=subprocess.PIPE)
                    with open(os.path.join(YB_THIRDPARTY_DIR, 'patches', patch), 'rt') as inp:
                        patch_text = inp.read()
                    assert process.stdin is not None
                    process.stdin.write(patch_text.encode('utf-8'))
                    process.stdin.close()
                    exit_code = process.wait()
                    if exit_code:
                        fatal("Patch {} for dependency {} failed with exit code {}".format(
                            patch, dep.name, exit_code))
                if dep.post_patch:
                    subprocess.check_call(dep.post_patch)

        with open(patch_level_path, 'wb') as out:
            # Just create an empty file.
            pass
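
The empty patchlevel file written above is a completion marker: its presence at the top of download_dependency short-circuits the whole download on later runs, and bumping the dependency's patch_version changes the marker name, invalidating it. The idiom in isolation (names are illustrative):

import os
from typing import Callable

def run_once(marker_path: str, action: Callable[[], None]) -> None:
    # Skip the action if a previous successful run left its marker behind.
    if os.path.exists(marker_path):
        return
    action()
    # An empty file suffices; only its existence and name carry meaning.
    with open(marker_path, 'w'):
        pass
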
    def ensure_file_downloaded(
            self,
            url: str,
            file_path: str,
            enable_using_alternative_url: bool,
            expected_checksum: Optional[str] = None,
            verify_checksum: bool = True) -> None:
        log(f"Ensuring {url} is downloaded to path {file_path}")
        file_name = os.path.basename(file_path)

        mkdir_if_missing(self.download_dir)

        if os.path.exists(file_path) and verify_checksum:
            # We check the file name against our checksum map only if the file already exists.
            # This way we still download the file even when we don't know its checksum yet,
            # which makes it easier to add new third-party dependencies.
            if expected_checksum is None:
                expected_checksum = self.get_expected_checksum_and_maybe_add_to_file(
                    file_name, downloaded_path=file_path)
            if self.verify_checksum(file_path, expected_checksum):
                log("No need to re-download %s: checksum already correct", file_name)
                return
            log("File %s already exists but has wrong checksum, removing", file_path)
            remove_path(file_path)

        log("Fetching %s from %s", file_name, url)

        download_successful = False
        alternative_url = ALTERNATIVE_URL_PREFIX + file_name
        total_attempts = 0

        url_candidates = [url]
        if enable_using_alternative_url:
            url_candidates += [alternative_url]

        for effective_url in url_candidates:
            if effective_url == alternative_url:
                log("Switching to alternative download URL %s after %d attempts",
                    alternative_url, total_attempts)
            sleep_time_sec = INITIAL_DOWNLOAD_RETRY_SLEEP_TIME_SEC
            for attempt_index in range(1, MAX_FETCH_ATTEMPTS + 1):
                try:
                    total_attempts += 1
                    curl_cmd_line = [
                        self.curl_path,
                        '-o',
                        file_path,
                        '-L',  # follow redirects
                        '--silent',
                        '--show-error',
                        effective_url]
                    log("Running command: %s", shlex_join(curl_cmd_line))
                    subprocess.check_call(curl_cmd_line)
                    download_successful = True
                    break
                except subprocess.CalledProcessError as ex:
                    log("Error downloading %s (attempt %d for this URL, total attempts %d): %s",
                        effective_url, attempt_index, total_attempts, str(ex))
                    if attempt_index == MAX_FETCH_ATTEMPTS and effective_url == alternative_url:
                        log("Giving up after %d attempts", MAX_FETCH_ATTEMPTS)
                        raise ex
                    log("Will retry after %.1f seconds", sleep_time_sec)
                    time.sleep(sleep_time_sec)
                    sleep_time_sec += DOWNLOAD_RETRY_SLEEP_INCREASE_SEC

            if download_successful:
                break

        if not os.path.exists(file_path):
            fatal("Downloaded '%s' but but unable to find '%s'", url, file_path)
        if verify_checksum:
            if expected_checksum is None:
                expected_checksum = self.get_expected_checksum_and_maybe_add_to_file(
                    file_name, downloaded_path=file_path)
            if not self.verify_checksum(file_path, expected_checksum):
                fatal("File '%s' has wrong checksum after downloading from '%s'. "
                      "Has %s, but expected: %s",
                      file_path,
                      url,
                      compute_file_sha256(file_path),
                      expected_checksum)
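
The retry loop above uses linear backoff: the sleep time starts at INITIAL_DOWNLOAD_RETRY_SLEEP_TIME_SEC and grows by DOWNLOAD_RETRY_SLEEP_INCREASE_SEC after each failed attempt. A self-contained sketch of the same pattern using urllib instead of curl (the constant values and function name are illustrative):

import time
import urllib.request

MAX_FETCH_ATTEMPTS = 5
INITIAL_RETRY_SLEEP_SEC = 0.5
RETRY_SLEEP_INCREASE_SEC = 0.5

def download_with_retries(url: str, file_path: str) -> None:
    sleep_time_sec = INITIAL_RETRY_SLEEP_SEC
    for attempt_index in range(1, MAX_FETCH_ATTEMPTS + 1):
        try:
            urllib.request.urlretrieve(url, file_path)
            return
        except OSError:
            if attempt_index == MAX_FETCH_ATTEMPTS:
                raise
            time.sleep(sleep_time_sec)
            sleep_time_sec += RETRY_SLEEP_INCREASE_SEC  # linear backoff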