Example #1
 def _remove_path_for_dependency(
         self, dep: Dependency, path: Optional[str], description: str) -> None:
     full_description = f"{description} for dependency {dep.name}"
     if path is None:
         log(f"Path to {full_description} is not defined")
         return
     if os.path.exists(path):
         log(f"Removing {full_description} at {path}")
         remove_path(path)
     else:
         log(f"Could not find {full_description} at {path}, nothing to remove")
Example #2
    def create_package(self) -> None:
        if os.path.exists(self.archive_tarball_path):
            logging.info("File already exists, deleting: %s",
                         self.archive_tarball_path)
            os.remove(self.archive_tarball_path)

        # Create a symlink with a constant name so we can copy the file around and use it for
        # creating artifacts for pull request builds.
        archive_symlink_path = os.path.join(YB_THIRDPARTY_DIR,
                                            'archive' + ARCHIVE_SUFFIX)
        archive_checksum_symlink_path = archive_symlink_path + CHECKSUM_SUFFIX

        tar_cmd = ['tar']
        patterns_to_exclude = EXCLUDE_PATTERNS_RELATIVE_TO_ARCHIVE_ROOT + [
            os.path.basename(file_path) for file_path in
            [archive_symlink_path, archive_checksum_symlink_path]
        ]
        for excluded_pattern in patterns_to_exclude:
            tar_cmd.extend([
                '--exclude',
                '%s/%s' % (self.archive_dir_name, excluded_pattern)
            ])
        for excluded_pattern in GENERAL_EXCLUDE_PATTERNS:
            tar_cmd.extend(['--exclude', excluded_pattern])

        tar_cmd.extend(
            ['-czf', self.archive_tarball_path, self.archive_dir_name])
        log_and_run_cmd(tar_cmd, cwd=self.build_dir_parent)

        sha256 = compute_file_sha256(self.archive_tarball_path)
        with open(self.archive_checksum_path, 'w') as sha256_file:
            sha256_file.write('%s  %s\n' % (sha256, self.archive_tarball_name))
        logging.info("Archive SHA256 checksum: %s, created checksum file: %s",
                     sha256, self.archive_checksum_path)

        for file_path in [archive_symlink_path, archive_checksum_symlink_path]:
            remove_path(file_path)

        create_symlink_and_log(self.archive_tarball_path, archive_symlink_path)
        create_symlink_and_log(self.archive_checksum_path,
                               archive_checksum_symlink_path)
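
The checksum file above is written in the two-space format understood by sha256sum -c, so it can be verified independently. A minimal verification sketch (the method name verify_package_checksum is hypothetical):

    def verify_package_checksum(self) -> bool:
        # Re-read the "<sha256>  <file name>" line written by create_package()
        # and compare it against a fresh digest of the tarball.
        with open(self.archive_checksum_path) as checksum_file:
            expected_sha256 = checksum_file.read().split()[0]
        return compute_file_sha256(self.archive_tarball_path) == expected_sha256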
Example #3
 def prepare_out_dirs(self) -> None:
     build_types = self.get_build_types()
     dirs = [
         os.path.join(self.fs_layout.tp_installed_dir, build_type) for build_type in build_types
     ]
     libcxx_dirs = [os.path.join(dir_path, 'libcxx') for dir_path in dirs]
     for dir_path in dirs + libcxx_dirs:
         lib_dir = os.path.join(dir_path, 'lib')
         mkdir_if_missing(lib_dir)
         mkdir_if_missing(os.path.join(dir_path, 'include'))
         # On some systems, autotools installs libraries to lib64 rather than lib. Fix
         # this by setting up lib64 as a symlink to lib. We have to do this step first
         # to handle cases where one third-party library depends on another. Make sure
         # we create a relative symlink so that the entire PREFIX_DIR could be moved,
         # e.g. after it is packaged and then downloaded on a different build node.
         lib64_dir = os.path.join(dir_path, 'lib64')
         if os.path.exists(lib64_dir):
             if os.path.islink(lib64_dir):
                 continue
             remove_path(lib64_dir)
         os.symlink('lib', lib64_dir)
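
The lib64 handling above is idempotent: an existing symlink is left alone, while a real lib64 directory is replaced. The same idiom in isolation, as a standard-library-only sketch:

 import os
 import shutil

 def ensure_lib64_is_symlink_to_lib(prefix_dir: str) -> None:
     # A relative target ('lib' rather than an absolute path) keeps the whole
     # prefix relocatable, e.g. after it is packaged and unpacked elsewhere.
     lib64_dir = os.path.join(prefix_dir, 'lib64')
     if os.path.islink(lib64_dir):
         return  # Already set up, possibly by an earlier run.
     if os.path.isdir(lib64_dir):
         shutil.rmtree(lib64_dir)  # Replace a real lib64 directory.
     elif os.path.exists(lib64_dir):
         os.remove(lib64_dir)
     os.symlink('lib', lib64_dir)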
Example #4
    def build_with_cmake(self,
                         dep: Dependency,
                         extra_args: List[str] = [],
                         use_ninja_if_available: bool = True,
                         src_subdir_name: Optional[str] = None,
                         extra_build_tool_args: List[str] = [],
                         should_install: bool = True,
                         install_targets: List[str] = ['install'],
                         shared_and_static: bool = False) -> None:
        build_tool = 'make'
        if use_ninja_if_available:
            ninja_available = is_ninja_available()
            log('Ninja is %s',
                'available' if ninja_available else 'unavailable')
            if ninja_available:
                build_tool = 'ninja'

        log("Building dependency %s using CMake. Build tool: %s", dep,
            build_tool)
        log_prefix = self.log_prefix(dep)
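        # Force local compilation; setting YB_REMOTE_COMPILATION to "0"
        # presumably disables YugabyteDB's distributed compilation for
        # third-party builds.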
        os.environ["YB_REMOTE_COMPILATION"] = "0"

        remove_path('CMakeCache.txt')
        remove_path('CMakeFiles')

        src_path = self.fs_layout.get_source_path(dep)
        if src_subdir_name is not None:
            src_path = os.path.join(src_path, src_subdir_name)

        args = ['cmake', src_path]
        if build_tool == 'ninja':
            args += ['-G', 'Ninja']
        args += self.get_common_cmake_flag_args(dep)
        if extra_args is not None:
            args += extra_args
        args += dep.get_additional_cmake_args(self)

        if shared_and_static and any(
                arg.startswith('-DBUILD_SHARED_LIBS=') for arg in args):
            raise ValueError(
                "shared_and_static=True is specified but CMake arguments already mention "
                "-DBUILD_SHARED_LIBS: %s" % args)

        if '-DBUILD_SHARED_LIBS=OFF' not in args and not shared_and_static:
            # TODO: a better approach for setting CMake arguments from multiple places.
            args.append('-DBUILD_SHARED_LIBS=ON')

        def build_internal(even_more_cmake_args: List[str] = []) -> None:
            final_cmake_args = args + even_more_cmake_args
            log("CMake command line (one argument per line):\n%s" %
                "\n".join([(" " * 4 + sanitize_flags_line_for_log(line))
                           for line in final_cmake_args]))
            log_output(log_prefix, final_cmake_args)

            if build_tool == 'ninja':
                dep.postprocess_ninja_build_file(self, 'build.ninja')

            build_tool_cmd = [
                build_tool, '-j{}'.format(get_make_parallelism())
            ] + extra_build_tool_args

            log_output(log_prefix, build_tool_cmd)

            if should_install:
                log_output(log_prefix, [build_tool] + install_targets)

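            # Sanity-check sanitizer flags using the compilation database that
            # CMake generated (this assumes -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
            # is among the common CMake flags).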
            with open('compile_commands.json') as compile_commands_file:
                compile_commands = json.load(compile_commands_file)

            for command_item in compile_commands:
                command_args = command_item['command'].split()
                if self.build_type == BUILD_TYPE_ASAN:
                    assert_list_contains(command_args, '-fsanitize=address')
                    assert_list_contains(command_args, '-fsanitize=undefined')
                if self.build_type == BUILD_TYPE_TSAN:
                    assert_list_contains(command_args, '-fsanitize=thread')

        if shared_and_static:
            for build_shared_libs_value, subdir_name in (('ON', 'shared'),
                                                         ('OFF', 'static')):
                build_dir = os.path.join(os.getcwd(), subdir_name)
                mkdir_if_missing(build_dir)
                build_shared_libs_cmake_arg = '-DBUILD_SHARED_LIBS=%s' % build_shared_libs_value
                log(
                    "Building dependency '%s' for build type '%s' with option: %s",
                    dep.name, self.build_type, build_shared_libs_cmake_arg)
                with PushDir(build_dir):
                    build_internal([build_shared_libs_cmake_arg])
        else:
            build_internal()
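
A dependency definition would typically invoke this from its build hook. A minimal usage sketch with a hypothetical dependency class (the hook name and CMake flags below are illustrative, not taken from real definitions):

    class ExampleCMakeDependency(Dependency):
        def build(self, builder: 'Builder') -> None:
            # Hypothetical hook; real dependencies pass their own CMake flags.
            builder.build_with_cmake(
                self,
                extra_args=['-DCMAKE_BUILD_TYPE=Release'],
                shared_and_static=True)
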
    def download_dependency(
            self,
            dep: Dependency,
            src_path: str,
            archive_path: Optional[str]) -> None:
        patch_level_path = os.path.join(src_path, 'patchlevel-{}'.format(dep.patch_version))
        if os.path.exists(patch_level_path):
            return

        download_url = dep.download_url
        assert download_url is not None, "Download URL not specified for dependency %s" % dep.name

        remove_path(src_path)

        # If download_url is "mkdir", we just create an empty directory with the specified name.
        if download_url != 'mkdir':
            if archive_path is None:
                return
            self.ensure_file_downloaded(
                url=download_url,
                file_path=archive_path,
                enable_using_alternative_url=True)
            self.extract_archive(archive_path,
                                 os.path.dirname(src_path),
                                 os.path.basename(src_path))
        else:
            log("Creating %s", src_path)
            mkdir_if_missing(src_path)

        if hasattr(dep, 'extra_downloads'):
            for extra in dep.extra_downloads:
                assert extra.archive_name is not None
                archive_path = os.path.join(self.download_dir, extra.archive_name)
                log("Downloading %s from %s", extra.archive_name, extra.download_url)
                self.ensure_file_downloaded(
                    url=extra.download_url,
                    file_path=archive_path,
                    enable_using_alternative_url=True)
                output_path = os.path.join(src_path, extra.dir_name)
                self.extract_archive(archive_path, output_path)
                if extra.post_exec is not None:
                    with PushDir(output_path):
                        assert isinstance(extra.post_exec, list)
                        if isinstance(extra.post_exec[0], str):
                            subprocess.check_call(cast(List[str], extra.post_exec))
                        else:
                            for command in extra.post_exec:
                                subprocess.check_call(command)

        if hasattr(dep, 'patches'):
            with PushDir(src_path):
                for patch in dep.patches:
                    log("Applying patch: %s", patch)
                    process = subprocess.Popen(['patch', '-p{}'.format(dep.patch_strip)],
                                               stdin=subprocess.PIPE)
                    with open(os.path.join(YB_THIRDPARTY_DIR, 'patches', patch), 'rt') as inp:
                        patch_contents = inp.read()
                    assert process.stdin is not None
                    process.stdin.write(patch_contents.encode('utf-8'))
                    process.stdin.close()
                    exit_code = process.wait()
                    if exit_code:
                        fatal("Patch {} for dependency {} failed with exit code: {}".format(
                            patch, dep.name, exit_code))
                if dep.post_patch:
                    subprocess.check_call(dep.post_patch)

        with open(patch_level_path, 'wb') as out:
            # Just create an empty file.
            pass
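
The empty patchlevel-N file acts as a stamp: bumping dep.patch_version changes the expected file name, so the early-return check at the top no longer passes and the source tree is re-downloaded and re-patched. The check in isolation, as a sketch:

    def needs_fresh_download(src_path: str, patch_version: int) -> bool:
        # Hypothetical standalone version of the stamp check above: the stamp
        # file name encodes the patch version, so bumping it invalidates the tree.
        return not os.path.exists(
            os.path.join(src_path, 'patchlevel-{}'.format(patch_version)))
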
    def ensure_file_downloaded(
            self,
            url: str,
            file_path: str,
            enable_using_alternative_url: bool,
            expected_checksum: Optional[str] = None,
            verify_checksum: bool = True) -> None:
        log(f"Ensuring {url} is downloaded to path {file_path}")
        file_name = os.path.basename(file_path)

        mkdir_if_missing(self.download_dir)

        if os.path.exists(file_path) and verify_checksum:
            # We check the file name against our checksum map only if the file exists. This way
            # we still download a file whose checksum we don't know yet, which makes it easier
            # to add new third-party dependencies.
            if expected_checksum is None:
                expected_checksum = self.get_expected_checksum_and_maybe_add_to_file(
                    file_name, downloaded_path=file_path)
            if self.verify_checksum(file_path, expected_checksum):
                log("No need to re-download %s: checksum already correct", file_name)
                return
            log("File %s already exists but has wrong checksum, removing", file_path)
            remove_path(file_path)

        log("Fetching %s from %s", file_name, url)

        download_successful = False
        alternative_url = ALTERNATIVE_URL_PREFIX + file_name
        total_attempts = 0

        url_candidates = [url]
        if enable_using_alternative_url:
            url_candidates += [alternative_url]

        for effective_url in url_candidates:
            if effective_url == alternative_url:
                log("Switching to alternative download URL %s after %d attempts",
                    alternative_url, total_attempts)
            sleep_time_sec = INITIAL_DOWNLOAD_RETRY_SLEEP_TIME_SEC
            for attempt_index in range(1, MAX_FETCH_ATTEMPTS + 1):
                try:
                    total_attempts += 1
                    curl_cmd_line = [
                        self.curl_path,
                        '-o',
                        file_path,
                        '--location',  # follow redirects
                        '--silent',
                        '--show-error',
                        effective_url]
                    log("Running command: %s", shlex_join(curl_cmd_line))
                    subprocess.check_call(curl_cmd_line)
                    download_successful = True
                    break
                except subprocess.CalledProcessError as ex:
                    log("Error downloading %s (attempt %d for this URL, total attempts %d): %s",
                        self.curl_path, attempt_index, total_attempts, str(ex))
                    if attempt_index == MAX_FETCH_ATTEMPTS and effective_url == alternative_url:
                        log("Giving up after %d attempts", MAX_FETCH_ATTEMPTS)
                        raise ex
                    log("Will retry after %.1f seconds", sleep_time_sec)
                    time.sleep(sleep_time_sec)
                    sleep_time_sec += DOWNLOAD_RETRY_SLEEP_INCREASE_SEC

            if download_successful:
                break

        if not os.path.exists(file_path):
            fatal("Downloaded '%s' but but unable to find '%s'", url, file_path)
        if verify_checksum:
            if expected_checksum is None:
                expected_checksum = self.get_expected_checksum_and_maybe_add_to_file(
                    file_name, downloaded_path=file_path)
            if not self.verify_checksum(file_path, expected_checksum):
                fatal("File '%s' has wrong checksum after downloading from '%s'. "
                      "Has %s, but expected: %s",
                      file_path,
                      url,
                      compute_file_sha256(file_path),
                      expected_checksum)
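
A typical call site passes the archive URL and destination path and lets the expected checksum come from the checksum map. A usage sketch with hypothetical values:

        # Hypothetical call site; the URL and file name are illustrative only.
        self.ensure_file_downloaded(
            url='https://example.com/archives/foo-1.2.3.tar.gz',
            file_path=os.path.join(self.download_dir, 'foo-1.2.3.tar.gz'),
            enable_using_alternative_url=True)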