def _detect_linuxbrew() -> None:
    """Detect the Linuxbrew directory and add its bin directory to PATH.

    Sets the module-level g_linuxbrew_dir global. Currently the only
    detection source is the YB_LINUXBREW_DIR environment variable.
    """
    global g_linuxbrew_dir
    if not is_linux():
        log("Not using Linuxbrew -- this is not Linux")
        return

    linuxbrew_dir_from_env = os.getenv('YB_LINUXBREW_DIR')
    if linuxbrew_dir_from_env:
        g_linuxbrew_dir = linuxbrew_dir_from_env
        log("Setting Linuxbrew directory based on YB_LINUXBREW_DIR env var: %s",
            linuxbrew_dir_from_env)
        # No early return here: fall through so the Linuxbrew bin directory is
        # also added to PATH, consistent with the detect_linuxbrew method
        # elsewhere in this codebase. (Removed dead commented-out code that
        # referenced 'self' in this module-level function.)

    if g_linuxbrew_dir:
        log("Linuxbrew directory: %s", g_linuxbrew_dir)
        new_path_entry = os.path.join(g_linuxbrew_dir, 'bin')
        log("Adding PATH entry: %s", new_path_entry)
        add_path_entry(new_path_entry)
    else:
        log("Not using Linuxbrew")
def should_rebuild_dependency(self, dep: Dependency) -> bool:
    """Decide whether dependency *dep* must be rebuilt for the current build type.

    Compares the build stamp saved by a previous build against a freshly
    computed one; any difference, or a missing source directory, forces a
    rebuild.
    """
    stamp_path = self.fs_layout.get_build_stamp_path_for_dependency(
        dep, self.build_type)
    old_build_stamp = None
    if os.path.exists(stamp_path):
        with open(stamp_path, 'rt') as inp:
            old_build_stamp = inp.read()

    new_build_stamp = self.get_build_stamp_for_dependency(dep)

    if dep.dir_name is not None:
        src_dir = self.fs_layout.get_source_path(dep)
        if not os.path.exists(src_dir):
            # Source was cleaned or never extracted -- stamps are irrelevant.
            log("Have to rebuild %s (%s): source dir %s does not exist",
                dep.name, self.build_type, src_dir)
            return True

    if old_build_stamp == new_build_stamp:
        log("Not rebuilding %s (%s) -- nothing changed.",
            dep.name, self.build_type)
        return False
    else:
        log("Have to rebuild %s (%s):", dep.name, self.build_type)
        log("Old build stamp for %s (from %s):\n%s",
            dep.name, stamp_path, indent_lines(old_build_stamp))
        log("New build stamp for %s:\n%s",
            dep.name, indent_lines(new_build_stamp))
        return True
def select_dependencies_to_build(self) -> None:
    """Populate self.selected_dependencies based on --dependencies / --skip."""
    self.selected_dependencies = []
    if self.args.dependencies:
        # Explicit allow-list: validate every requested name first.
        known_names = {d.name for d in self.dependencies}
        for requested_name in self.args.dependencies:
            if requested_name not in known_names:
                fatal(
                    "Unknown dependency name: %s. Valid dependency names:\n%s",
                    requested_name,
                    (" " * 4 + ("\n" + " " * 4).join(sorted(known_names))))
        self.selected_dependencies = [
            d for d in self.dependencies if d.name in self.args.dependencies
        ]
    elif self.args.skip:
        # Deny-list: build everything except the comma-separated skip set.
        skipped = set(self.args.skip.split(','))
        log("Skipping dependencies: %s", sorted(skipped))
        self.selected_dependencies = []
        for candidate in self.dependencies:
            if candidate.name in skipped:
                skipped.remove(candidate.name)
            else:
                self.selected_dependencies.append(candidate)
        if skipped:
            # Anything left over never matched a known dependency name.
            raise ValueError("Unknown dependencies, cannot skip: %s" % sorted(skipped))
    else:
        self.selected_dependencies = self.dependencies
def set_compiler(self, compiler_type: str) -> None:
    """Select the C/C++ compiler type and export CC/CXX environment variables.

    On macOS only clang is accepted. When the compiler wrapper is enabled,
    CC/CXX point at wrapper scripts and the real compiler paths are passed
    through YB_THIRDPARTY_REAL_{C,CXX}_COMPILER.
    """
    if is_mac():
        if compiler_type != 'clang':
            raise ValueError(
                "Cannot set compiler type to %s on macOS, only clang is supported" %
                compiler_type)
        self.compiler_type = 'clang'
    else:
        self.compiler_type = compiler_type

    self.find_compiler_by_type(compiler_type)

    c_compiler = self.get_c_compiler()
    cxx_compiler = self.get_cxx_compiler()

    if self.use_compiler_wrapper:
        # Wrapper scripts read the actual compiler paths from these env vars.
        os.environ['YB_THIRDPARTY_REAL_C_COMPILER'] = c_compiler
        os.environ['YB_THIRDPARTY_REAL_CXX_COMPILER'] = cxx_compiler
        os.environ['YB_THIRDPARTY_USE_CCACHE'] = '1' if self.use_ccache else '0'

        python_scripts_dir = os.path.join(
            YB_THIRDPARTY_DIR, 'python', 'yugabyte_db_thirdparty')
        os.environ['CC'] = os.path.join(python_scripts_dir, 'compiler_wrapper_cc.py')
        os.environ['CXX'] = os.path.join(python_scripts_dir, 'compiler_wrapper_cxx.py')
    else:
        os.environ['CC'] = c_compiler
        os.environ['CXX'] = cxx_compiler

    self._identify_compiler_version()
    log(f"C compiler: {self.cc_identification}")
    log(f"C++ compiler: {self.cxx_identification}")
def perform_pre_build_steps(self, dep: Dependency) -> None:
    """Log a build banner, download/extract *dep*, and record FOSSA metadata."""
    log("")
    colored_log(YELLOW_COLOR, SEPARATOR)
    colored_log(YELLOW_COLOR, "Building %s (%s)", dep.name, self.build_type)
    colored_log(YELLOW_COLOR, SEPARATOR)

    self.download_manager.download_dependency(
        dep=dep,
        src_path=self.fs_layout.get_source_path(dep),
        archive_path=self.fs_layout.get_archive_path(dep))

    archive_name = dep.get_archive_name()
    if archive_name:
        archive_path = os.path.join('downloads', archive_name)
        # Record module info consumed by FOSSA license scanning.
        self.fossa_modules.append({
            "fossa_module": {
                "name": f"{dep.name}-{dep.version}",
                "type": "raw",
                "target": os.path.basename(archive_path)
            },
            "yb_metadata": {
                "url": dep.download_url,
                "sha256sum": self.download_manager.get_expected_checksum(archive_name)
            }
        })
def save_build_stamp_for_dependency(self, dep: Dependency) -> None:
    """Write the current build stamp for *dep* to its per-build-type stamp file."""
    stamp_contents = self.get_build_stamp_for_dependency(dep)
    stamp_file_path = self.fs_layout.get_build_stamp_path_for_dependency(
        dep, self.build_type)
    log("Saving new build stamp to '%s':\n%s",
        stamp_file_path, indent_lines(stamp_contents))
    with open(stamp_file_path, "wt") as stamp_file:
        stamp_file.write(stamp_contents)
def run(self) -> None:
    """Top-level build driver.

    Selects the compiler, optionally cleans, prepares output directories,
    builds the common build type, then builds each applicable instrumented /
    uninstrumented build type for this platform and compiler.
    """
    self.compiler_choice.set_compiler(
        'clang' if self.compiler_choice.use_only_clang() else 'gcc')
    if self.args.clean or self.args.clean_downloads:
        self.fs_layout.clean(self.selected_dependencies, self.args.clean_downloads)
    self.prepare_out_dirs()
    # Put the installed common bin directories ahead of the inherited PATH.
    os.environ['PATH'] = ':'.join([
        os.path.join(self.fs_layout.tp_installed_common_dir, 'bin'),
        os.path.join(self.fs_layout.tp_installed_llvm7_common_dir, 'bin'),
        os.environ['PATH']
    ])

    self.build_one_build_type(BUILD_TYPE_COMMON)
    build_types = []
    if is_linux():
        build_types.append(BUILD_TYPE_UNINSTRUMENTED)
    if self.compiler_choice.use_only_gcc():
        if is_linux() and not self.compiler_choice.using_linuxbrew():
            # Starting to support ASAN for GCC compilers
            # (not for the current GCC 5.5 build on Linuxbrew, though).
            build_types.append(BUILD_TYPE_ASAN)
    else:
        if self.compiler_choice.using_linuxbrew() or is_mac():
            build_types.append(BUILD_TYPE_CLANG_UNINSTRUMENTED)
        if is_linux() and not self.args.skip_sanitizers:
            build_types.append(BUILD_TYPE_ASAN)
            build_types.append(BUILD_TYPE_TSAN)
    log(f"Full list of build types: {build_types}")

    for build_type in build_types:
        self.build_one_build_type(build_type)
def build_internal(even_more_cmake_args: List[str] = []) -> None:
    """Run the CMake configure, build, and optional install steps in the
    current directory.

    NOTE(review): this is a closure -- args, log_prefix, build_tool, dep,
    extra_build_tool_args, should_install, install_targets and self all come
    from the enclosing build_with_cmake scope.
    """
    final_cmake_args = args + even_more_cmake_args
    log("CMake command line (one argument per line):\n%s" %
        "\n".join([(" " * 4 + sanitize_flags_line_for_log(line))
                   for line in final_cmake_args]))
    log_output(log_prefix, final_cmake_args)

    if build_tool == 'ninja':
        dep.postprocess_ninja_build_file(self, 'build.ninja')

    build_tool_cmd = [
        build_tool, '-j{}'.format(get_make_parallelism())
    ] + extra_build_tool_args
    log_output(log_prefix, build_tool_cmd)

    if should_install:
        log_output(log_prefix, [build_tool] + install_targets)

    # Verify that sanitizer flags made it into every compile command.
    with open('compile_commands.json') as compile_commands_file:
        compile_commands = json.load(compile_commands_file)
    for command_item in compile_commands:
        command_args = command_item['command'].split()
        if self.build_type == BUILD_TYPE_ASAN:
            assert_list_contains(command_args, '-fsanitize=address')
            assert_list_contains(command_args, '-fsanitize=undefined')
        if self.build_type == BUILD_TYPE_TSAN:
            assert_list_contains(command_args, '-fsanitize=thread')
def run(self) -> None:
    """Scan installed executables and libraries and check their dependencies.

    Walks the lib/libcxx/sbin subdirectories under each build type's install
    directory and validates every regular file with good_libs(). Reports a
    fatal error if any problematic library dependency is found.
    """
    self.init_regex()
    heading("Scanning installed executables and libraries...")
    test_pass = True
    # files to examine are much reduced if we look only at bin and lib directories
    # NOTE(review): '[s]bin' matches only 'sbin', not 'bin' -- confirm that
    # excluding a plain 'bin' directory is intentional.
    dir_pattern = re.compile('^(lib|libcxx|[s]bin)$')
    # Renamed the loop variable: 'type' shadowed the builtin of the same name.
    dirs = [os.path.join(self.tp_installed_dir, build_type)
            for build_type in BUILD_TYPES]
    for installed_dir in dirs:
        if not os.path.isdir(installed_dir):
            logging.info("Directory %s does not exist, skipping", installed_dir)
            continue
        with os.scandir(installed_dir) as candidate_dirs:
            for candidate in candidate_dirs:
                if dir_pattern.match(candidate.name):
                    examine_path = os.path.join(installed_dir, candidate.name)
                    for dirpath, dirnames, files in os.walk(examine_path):
                        for file_name in files:
                            full_path = os.path.join(dirpath, file_name)
                            # Skip symlinks; only examine regular files.
                            if os.path.islink(full_path):
                                continue
                            if not self.good_libs(full_path):
                                test_pass = False
    if not test_pass:
        fatal(f"Found problematic library dependencies, using tool: {self.tool}")
    else:
        log("No problems found with library dependencies.")
def check_libs_for_file(self, file_path: str) -> bool:
    """Check a file's shared-library dependencies and minimum macOS version.

    Returns True if the file passes all checks, or if 'otool -L' reports it
    is not an object file at all.
    """
    libout = subprocess.check_output(['otool', '-L', file_path]).decode('utf-8')
    if 'is not an object file' in libout:
        return True

    if not self.check_lib_deps(file_path, libout):
        return False

    min_supported_macos_version = get_min_supported_macos_version()

    # Additionally, check for the minimum macOS version encoded in the library file.
    otool_small_l_output = subprocess.check_output(
        ['otool', '-l', file_path]).decode('utf-8')

    section = ""
    for line in otool_small_l_output.split('\n'):
        line = line.strip()
        if line.endswith(':'):
            # Remember the most recent section header for error reporting.
            section = line
        if line.startswith('minos '):
            items = line.split()
            min_macos_version = items[1]
            # String comparison: versions must match exactly.
            if min_macos_version != min_supported_macos_version:
                log(
                    "File %s has wrong minimum supported macOS version: %s. Full line:\n%s\n"
                    "(output from 'otool -l'). Expected: %s, section: %s",
                    file_path, min_macos_version, line,
                    min_supported_macos_version, section)
                return False

    return True
def build_with_configure(self,
                         log_prefix: str,
                         extra_args: Optional[List[str]] = None,
                         configure_cmd: Optional[List[str]] = None,
                         install: Optional[List[str]] = None,
                         run_autogen: bool = False,
                         autoconf: bool = False,
                         src_subdir_name: Optional[str] = None) -> None:
    """Build an autotools-style dependency via configure / make / make install.

    Args:
        log_prefix: prefix for log output.
        extra_args: extra arguments appended to the configure command line.
        configure_cmd: the configure command (default: ['./configure']).
        install: make targets used for installation (default: ['install']);
            pass an empty list to skip the install step.
        run_autogen: run ./autogen.sh before configuring.
        autoconf: run 'autoreconf -i' before configuring.
        src_subdir_name: optional subdirectory of the current dir to build in.
    """
    # Fix for mutable default arguments: materialize the list defaults here.
    if extra_args is None:
        extra_args = []
    if configure_cmd is None:
        configure_cmd = ['./configure']
    if install is None:
        install = ['install']

    os.environ["YB_REMOTE_COMPILATION"] = "0"
    dir_for_build = os.getcwd()
    if src_subdir_name:
        dir_for_build = os.path.join(dir_for_build, src_subdir_name)

    with PushDir(dir_for_build):
        log("Building in %s", dir_for_build)
        if run_autogen:
            log_output(log_prefix, ['./autogen.sh'])
        if autoconf:
            log_output(log_prefix, ['autoreconf', '-i'])

        configure_args = (
            configure_cmd.copy() + ['--prefix={}'.format(self.prefix)] + extra_args)
        log_output(log_prefix, configure_args)
        log_output(log_prefix, ['make', '-j{}'.format(get_make_parallelism())])
        if install:
            log_output(log_prefix, ['make'] + install)
def check_libs_for_file(self, file_path: str) -> bool:
    """Run ldd on *file_path* and validate its shared-library dependencies."""
    try:
        libout = subprocess.check_output(
            ['ldd', file_path],
            stderr=subprocess.STDOUT,
            env={'LC_ALL': 'en_US.UTF-8'}).decode('utf-8')
    except subprocess.CalledProcessError as ex:
        if ex.returncode > 1:
            log("Unexpected exit code %d from ldd, file %s", ex.returncode, file_path)
            log(ex.stdout.decode('utf-8'))
            return False
        # Exit code 1 is tolerated here and ldd's output is still parsed.
        # NOTE(review): presumably this covers "not a dynamic executable" --
        # confirm against ldd's behavior.
        libout = ex.stdout.decode('utf-8')

    file_basename = os.path.basename(file_path)
    additional_allowed_pattern = None
    if file_basename.startswith('libc++abi.so.'):
        # One exception: libc++abi.so is not able to find libc++ because it loads the ASAN
        # runtime library that is part of the LLVM distribution and does not have the correct
        # rpath set. This happens on CentOS with our custom build of LLVM. We might be able to
        # fix this by specifying rpath correctly when building LLVM, but as of 12/2020 we just
        # ignore this error here.
        #
        # $ ldd installed/asan/libcxx/lib/libc++abi.so.1.0
        # linux-vdso.so.1 =>
        # libclang_rt.asan-x86_64.so =>
        # $LLVM_DIR/lib/clang/11.0.0/lib/linux/libclang_rt.asan-x86_64.so
        # libclang_rt.ubsan_minimal-x86_64.so =>
        # $LLVM_DIR/lib/clang/11.0.0/lib/linux/libclang_rt.ubsan_minimal-x86_64.so
        # libunwind.so.1 => installed/common/lib/libunwind.so.1
        # libdl.so.2 => /lib64/libdl.so.2
        # libpthread.so.0 => /lib64/libpthread.so.0
        # libm.so.6 => /lib64/libm.so.6
        # libc.so.6 => /lib64/libc.so.6
        # libc++.so.1 => not found  <-- THIS IS OK
        # libgcc_s.so.1 => /lib64/libgcc_s.so.1
        # librt.so.1 => /lib64/librt.so.1
        # /lib64/ld-linux-x86-64.so.2
        #
        # Run
        # LD_DEBUG=all ldd installed/asan/libcxx/lib/libc++abi.so.1.0
        # and notice the following line:
        #
        # file=libc++.so.1 [0];
        # needed by $LLVM_DIR/lib/clang/11.0.0/lib/linux/libclang_rt.asan-x86_64.so
        #
        # Also running
        # ldd $LLVM_DIR/lib/clang/11.0.0/lib/linux/libclang_rt.asan-x86_64.so
        #
        # reports "libc++.so.1 => not found".
        additional_allowed_pattern = self.LIBCXX_NOT_FOUND

    return self.check_lib_deps(file_path, libout, additional_allowed_pattern)
def check_cxx_compiler_flag(self, flag: str) -> bool:
    """Return True if the C++ compiler accepts *flag*.

    Attempts to compile a trivial program (fed via stdin) with the flag and
    checks the compiler's exit status.
    """
    cxx_path = self.compiler_choice.get_cxx_compiler()
    log(f"Checking if the compiler {cxx_path} accepts the flag {flag}")
    trivial_program = "int main() { return 0; }"
    proc = subprocess.Popen(
        [cxx_path, '-x', 'c++', flag, '-'], stdin=subprocess.PIPE)
    assert proc.stdin is not None
    proc.stdin.write(trivial_program.encode('utf-8'))
    proc.stdin.close()
    return proc.wait() == 0
def log_and_set_env_var_to_list(env_var_map: Dict[str, Optional[str]],
                                env_var_name: str,
                                items: List[str]) -> None:
    """Join *items* into a space-separated value and record it in *env_var_map*.

    An empty joined value maps the variable to None instead.
    """
    joined_value = ' '.join(items).strip()
    if not joined_value:
        log('Unsetting env var %s', env_var_name)
        # When used with EnvVarContext, this will cause the environment variable to be unset.
        env_var_map[env_var_name] = None
        return
    log('Setting env var %s to %s', env_var_name, joined_value)
    env_var_map[env_var_name] = joined_value
def create_build_dir_and_prepare(self, dep: Dependency) -> str:
    """Create the build directory for *dep* and return its path.

    When the dependency requests copied sources, the source tree is rsynced
    into the build directory first.
    """
    source_dir = self.fs_layout.get_source_path(dep)
    if not os.path.isdir(source_dir):
        fatal("Directory '{}' does not exist".format(source_dir))

    dependency_build_dir = self.fs_layout.get_build_dir_for_dependency(
        dep, self.build_type)
    mkdir_if_missing(dependency_build_dir)

    if dep.copy_sources:
        log("Bootstrapping %s from %s", dependency_build_dir, source_dir)
        # Trailing slash: copy the directory's contents, not the directory.
        subprocess.check_call(
            ['rsync', '-a', source_dir + '/', dependency_build_dir])
    return dependency_build_dir
def check_lib_deps(
        self,
        file_path: str,
        cmdout: str,
        additional_allowed_pattern: Optional[Pattern] = None) -> bool:
    """Return True if every line of *cmdout* matches an allowed path pattern.

    On the first offending line, logs the file path as a header, then logs
    each bad path.
    """
    all_ok = True
    for dep_line in cmdout.splitlines():
        line_allowed = bool(self.okay_paths.match(dep_line)) or bool(
            additional_allowed_pattern and
            additional_allowed_pattern.match(dep_line))
        if line_allowed:
            continue
        if all_ok:
            # Emit the file header only once, before the first bad path.
            log(file_path + ":")
            all_ok = False
        log("Bad path: %s", dep_line)
    return all_ok
def get_expected_checksum_and_maybe_add_to_file(
        self, filename: str, downloaded_path: str) -> str:
    """Return the expected checksum for *filename*.

    If the checksum is unknown and adding is enabled, compute it from
    *downloaded_path*, append it to the checksum file, and return it.
    A missing checksum with adding disabled is a fatal error.
    """
    if filename not in self.filename2checksum:
        if self.should_add_checksum:
            # Rewrite the checksum file with the new entry appended.
            with open(self.checksum_file_path, 'rt') as checksum_input:
                existing_lines = [
                    existing_line.rstrip()
                    for existing_line in checksum_input.readlines()
                ]
            checksum = compute_file_sha256(downloaded_path)
            existing_lines.append("%s %s" % (checksum, filename))
            with open(self.checksum_file_path, 'wt') as checksum_output:
                for output_line in existing_lines:
                    checksum_output.write(output_line + "\n")
            self.filename2checksum[filename] = checksum
            log("Added checksum for %s to %s: %s",
                filename, self.checksum_file_path, checksum)
            return checksum
        fatal("No expected checksum provided for {}".format(filename))
    return self.filename2checksum[filename]
def build_one_build_type(self, build_type: str) -> None:
    """Build all selected dependencies for a single build type.

    Skipped entirely (except for the common build type) when the user
    requested a specific different build type on the command line.
    """
    if (build_type != BUILD_TYPE_COMMON and
            self.args.build_type is not None and
            build_type != self.args.build_type):
        log("Skipping build type %s because build type %s is specified in the arguments",
            build_type, self.args.build_type)
        return

    self.set_build_type(build_type)
    build_group = (
        BUILD_GROUP_COMMON if build_type == BUILD_TYPE_COMMON
        else BUILD_GROUP_INSTRUMENTED)

    for dep in self.selected_dependencies:
        # NOTE(review): pre-build steps (which include downloading) run for
        # every selected dependency, even those filtered out by the group /
        # stamp checks below -- confirm this is intentional.
        self.perform_pre_build_steps(dep)
        if (dep.build_group == build_group and
                dep.should_build(self) and
                self.should_rebuild_dependency(dep)):
            self.build_dependency(dep)
def perform_pre_build_steps(self, dep: Dependency) -> None:
    """Log a build banner, download *dep*, and optionally collect license notices.

    When --license-report is enabled, scans the source tree for
    LICENSE/COPYING/NOTICE/CREDITS files and groups them by identical
    notice text in self.license_report.
    """
    log("")
    colored_log(YELLOW_COLOR, SEPARATOR)
    colored_log(YELLOW_COLOR, "Building %s (%s)", dep.name, self.build_type)
    colored_log(YELLOW_COLOR, SEPARATOR)

    self.download_manager.download_dependency(
        dep=dep,
        src_path=self.fs_layout.get_source_path(dep),
        archive_path=self.fs_layout.get_archive_path(dep))

    if self.args.license_report:
        # Maps each distinct notice text to the files that contain it.
        notices: Dict[str, List[str]] = {}
        src_path = os.path.abspath(self.fs_layout.get_source_path(dep))
        for root, dirs, files in os.walk(src_path):
            for name in files:
                # Match by base name so e.g. LICENSE.txt is also picked up.
                if os.path.splitext(name)[0] in [
                        'LICENSE', 'COPYING', 'NOTICE', 'CREDITS']:
                    file_path = os.path.join(root, name)
                    rel_path = os.path.relpath(
                        os.path.abspath(file_path), src_path)
                    log("Found copyright notice file: %s", file_path)
                    with open(file_path) as input_file:
                        notice = input_file.read()
                    notice = notice.rstrip()
                    if notice not in notices:
                        notices[notice] = []
                    notices[notice].append(rel_path)
        self.license_report.append(
            {
                "name": dep.name,
                "version": dep.version,
                "copyright_notices": [
                    {
                        "notice": notice,
                        "files": sorted(files)
                    } for notice, files in notices.items()
                ],
                "url": dep.download_url
            }
        )
def fix_shared_library_references(
        install_prefix: str, lib_name_prefix: str) -> None:
    """On macOS, rewrite Mach-O install names to use @rpath / @loader_path.

    Scans dylibs under <install_prefix>/lib and executables under
    <install_prefix>/sbin. For each dependency reported by 'otool -L' whose
    name starts with *lib_name_prefix*: if the dependency is the library
    itself, its id is set to @rpath/<name>; otherwise the reference is
    rewritten to @loader_path/<name>. No-op on non-macOS platforms.
    """
    if not is_mac():
        return
    lib_dir = os.path.realpath(os.path.join(install_prefix, "lib"))
    lib_paths = glob.glob(os.path.join(lib_dir, lib_name_prefix + "*.dylib"))
    bin_dir = os.path.realpath(os.path.join(install_prefix, "sbin"))
    bin_paths = glob.glob(os.path.join(bin_dir, "*"))
    for lib in lib_paths + bin_paths:
        # Symlinks resolve to real files that are processed separately.
        if os.path.islink(lib):
            continue
        lib_basename = os.path.basename(lib)
        otool_output = subprocess.check_output(['otool', '-L', lib]).decode('utf-8')
        for line in otool_output.split('\n'):
            # Dependency lines in otool -L output are tab-indented.
            if line.startswith('\t' + lib_name_prefix):
                dependency_name = line.strip().split()[0]
                # Resolve symlinks to detect a self-reference through a
                # differently-named link.
                dependency_real_name = os.path.relpath(
                    os.path.realpath(os.path.join(lib_dir, dependency_name)),
                    lib_dir)
                if lib_basename in [dependency_name, dependency_real_name]:
                    log("Making %s refer to itself using @rpath", lib)
                    subprocess.check_call([
                        'install_name_tool', '-id',
                        '@rpath/' + dependency_name, lib
                    ])
                else:
                    log("Making %s refer to %s using @loader_path",
                        lib, dependency_name)
                    subprocess.check_call([
                        'install_name_tool', '-change', dependency_name,
                        '@loader_path/' + dependency_name, lib
                    ])
def build_dependency(self, dep: Dependency) -> None: self.init_flags(dep) # This is needed at least for glog to be able to find gflags. self.add_rpath( os.path.join(self.fs_layout.tp_installed_dir, self.build_type, 'lib')) if self.build_type != BUILD_TYPE_COMMON: # Needed to find libunwind for Clang 10 when using compiler-rt. self.add_rpath( os.path.join(self.fs_layout.tp_installed_dir, BUILD_TYPE_COMMON, 'lib')) if self.args.download_extract_only: log( "Skipping build of dependency %s, build type %s, --download-extract-only is " "specified.", dep.name, self.build_type) return env_vars: Dict[str, Optional[str]] = { "CPPFLAGS": " ".join(self.preprocessor_flags) } log_and_set_env_var_to_list(env_vars, 'CXXFLAGS', self.get_effective_cxx_flags(dep)) log_and_set_env_var_to_list(env_vars, 'CFLAGS', self.get_effective_c_flags(dep)) log_and_set_env_var_to_list(env_vars, 'LDFLAGS', self.get_effective_ld_flags(dep)) log_and_set_env_var_to_list(env_vars, 'LIBS', self.libs) log_and_set_env_var_to_list(env_vars, 'CPPFLAGS', self.get_effective_preprocessor_flags(dep)) if self.build_type == BUILD_TYPE_ASAN: # To avoid errors similar to: # https://gist.githubusercontent.com/mbautin/4b8eec566f54bcc35706dcd97cab1a95/raw # # This could also be fixed to some extent by the compiler flags # -mllvm -asan-use-private-alias=1 # but applying that flag to all builds is complicated in practice and is probably # best done using a compiler wrapper script, which would slow things down. env_vars["ASAN_OPTIONS"] = "detect_odr_violation=0" with PushDir(self.create_build_dir_and_prepare(dep)): with EnvVarContext(**env_vars): write_env_vars('yb_dependency_env.sh') dep.build(self) self.save_build_stamp_for_dependency(dep) log("") log("Finished building %s (%s)", dep.name, self.build_type) log("")
def get_expected_checksum_and_maybe_add_to_file(
        self, file_name: str, downloaded_path: Optional[str]) -> Optional[str]:
    """Return the expected checksum for *file_name*, or None if unknown.

    When adding is enabled and a downloaded file is available, the checksum
    is computed, appended to the checksum file, cached, and returned.
    """
    if file_name in self.file_name_to_checksum:
        return self.file_name_to_checksum[file_name]

    if not (self.should_add_checksum and downloaded_path):
        return None

    # Rewrite the checksum file with the new entry appended at the end.
    with open(self.checksum_file_path, 'rt') as checksum_input:
        existing_lines = [
            existing_line.rstrip()
            for existing_line in checksum_input.readlines()
        ]
    checksum = compute_file_sha256(downloaded_path)
    existing_lines.append("%s %s" % (checksum, file_name))
    with open(self.checksum_file_path, 'wt') as checksum_output:
        for output_line in existing_lines:
            checksum_output.write(output_line + "\n")

    self.file_name_to_checksum[file_name] = checksum
    log("Added checksum for %s to %s: %s",
        file_name, self.checksum_file_path, checksum)
    return checksum
def check_lib_deps(
        self,
        file_path: str,
        cmdout: str,
        additional_allowed_pattern: Optional[Pattern] = None) -> bool:
    """Return True if every line of *cmdout* matches an allowed pattern.

    On the first failure, logs each allowed pattern once per run (for
    debugging), then the offending file and each bad path.
    """
    status = True
    for line in cmdout.splitlines():
        if (not self.allowed_patterns.match(line) and
                not (additional_allowed_pattern is not None and
                     additional_allowed_pattern.match(line))):
            # Log the allowed patterns for easier debugging.
            for allowed_pattern in [self.allowed_patterns] + (
                    [additional_allowed_pattern]
                    if additional_allowed_pattern else []):
                # Deduplicate across calls via self.logged_allowed_patterns.
                if allowed_pattern.pattern not in self.logged_allowed_patterns:
                    log("Allowed pattern: %s", allowed_pattern.pattern)
                    self.logged_allowed_patterns.add(allowed_pattern.pattern)

            if status:
                log(file_path + ":")
                status = False
            log("Bad path: %s", line)
    return status
def __init__(self, conf_name_pattern: str) -> None:
    """Set up a multi-configuration build run.

    Re-points the 'latest' symlink at the current run directory, copies the
    code into the run's checkout directory, and loads build configurations
    from the CircleCI config, filtered by *conf_name_pattern*
    (fnmatch-style; empty pattern selects everything).
    """
    self.common_conf = MultiBuildCommonConf()

    latest_link_path = os.path.join(self.common_conf.dir_of_all_runs, 'latest')
    mkdir_if_missing(os.path.dirname(self.common_conf.checkout_dir))
    # Replace any existing 'latest' symlink with one for this run.
    if os.path.exists(latest_link_path):
        os.remove(latest_link_path)
    os.symlink(
        os.path.basename(self.common_conf.root_run_dir), latest_link_path)
    copy_code_to(self.common_conf.checkout_dir)

    with open(CIRCLECI_CONFIG_PATH) as circleci_conf_file:
        circleci_conf = yaml.load(circleci_conf_file, Loader=yaml.SafeLoader)

    self.configurations = []
    for circleci_job in circleci_conf['workflows']['build-release']['jobs']:
        build_params = circleci_job['build']
        conf = BuildConfiguration(
            common_conf=self.common_conf,
            name=build_params['name'],
            docker_image=build_params['docker_image'],
            archive_name_suffix=build_params['archive_name_suffix'],
            build_thirdparty_args=build_params.get('build_thirdparty_args', ''))
        if not conf_name_pattern:
            self.configurations.append(conf)
            continue
        if not fnmatch.fnmatch(conf.name, conf_name_pattern):
            log("Skipping configuration '%s' (does not match pattern %s)",
                conf.name, conf_name_pattern)
            continue
        self.configurations.append(conf)
    for conf in self.configurations:
        log("Will build configuration: %s", conf.name)
def _remove_path_for_dependency(
        self, dep: Dependency, path: Optional[str], description: str) -> None:
    """Remove *path* (described by *description*) for *dep*, if it exists."""
    full_description = f"{description} for dependency {dep.name}"
    if path is None:
        log(f"Path to {full_description} is not defined")
        return
    if not os.path.exists(path):
        log(f"Could not find {full_description} at {path}, nothing to remove")
        return
    log(f"Removing {full_description} at {path}")
    remove_path(path)
def set_build_type(self, build_type: str) -> None:
    """Switch the builder to *build_type* and select the matching compiler."""
    self.build_type = build_type
    self.prefix = self.get_install_prefix_with_qualifier(qualifier=None)
    self.prefix_bin = os.path.join(self.prefix, 'bin')
    self.prefix_lib = os.path.join(self.prefix, 'lib')
    self.prefix_include = os.path.join(self.prefix, 'include')
    selected_compiler = (
        'clang' if self.compiler_choice.building_with_clang(build_type) else 'gcc')
    self.compiler_choice.set_compiler(selected_compiler)
    heading("Building {} dependencies (compiler type: {})".format(
        build_type, self.compiler_choice.compiler_type))
    log("Compiler type: %s", self.compiler_choice.compiler_type)
    log("C compiler: %s", self.compiler_choice.get_c_compiler())
    log("C++ compiler: %s", self.compiler_choice.get_cxx_compiler())
def add_rpath(self, path: str) -> None:
    """Add an rpath linker flag for *path* and allow shared libraries from it."""
    log("Adding RPATH: %s", path)
    rpath_flag = get_rpath_flag(path)
    self.ld_flags.append(rpath_flag)
    self.additional_allowed_shared_lib_paths.add(path)
def detect_linuxbrew(self) -> None:
    """Detect the Linuxbrew directory (if any) and add its bin dir to PATH.

    Linuxbrew is only considered on Linux, and only when no single compiler
    type and no compiler suffix are configured. The directory is taken from
    a 'linuxbrew*' compiler prefix, or failing that the YB_LINUXBREW_DIR
    environment variable.
    """
    self.linuxbrew_dir = None
    if not is_linux():
        log("Not using Linuxbrew -- this is not Linux")
        return
    if self.single_compiler_type:
        log("Not using Linuxbrew, single_compiler_type is set to %s",
            self.single_compiler_type)
        return
    if self.compiler_suffix:
        log("Not using Linuxbrew, compiler_suffix is set to %s",
            self.compiler_suffix)
        return

    if self.compiler_prefix:
        compiler_prefix_basename = os.path.basename(self.compiler_prefix)
        if compiler_prefix_basename.startswith('linuxbrew'):
            self.linuxbrew_dir = self.compiler_prefix
            log("Setting Linuxbrew directory based on compiler prefix %s",
                self.compiler_prefix)

    # Fall back to the environment variable if the prefix did not decide it.
    if self.linuxbrew_dir is None:
        linuxbrew_dir_from_env = os.getenv('YB_LINUXBREW_DIR')
        if linuxbrew_dir_from_env:
            self.linuxbrew_dir = linuxbrew_dir_from_env
            log("Setting Linuxbrew directory based on YB_LINUXBREW_DIR env var: %s",
                linuxbrew_dir_from_env)

    if self.linuxbrew_dir:
        log("Linuxbrew directory: %s", self.linuxbrew_dir)
        new_path_entry = os.path.join(self.linuxbrew_dir, 'bin')
        log("Adding PATH entry: %s", new_path_entry)
        add_path_entry(new_path_entry)
def build_with_cmake(self,
                     dep: Dependency,
                     extra_args: Optional[List[str]] = None,
                     use_ninja_if_available: bool = True,
                     src_subdir_name: Optional[str] = None,
                     extra_build_tool_args: Optional[List[str]] = None,
                     should_install: bool = True,
                     install_targets: Optional[List[str]] = None,
                     shared_and_static: bool = False) -> None:
    """Configure and build a dependency with CMake (make or Ninja).

    Args:
        dep: the dependency to build.
        extra_args: extra CMake arguments (default: none).
        use_ninja_if_available: use Ninja as the build tool when available.
        src_subdir_name: optional subdirectory of the source tree to configure.
        extra_build_tool_args: extra arguments for make/ninja (default: none).
        should_install: whether to run the install targets after building.
        install_targets: build-tool install targets (default: ['install']).
        shared_and_static: build twice, with -DBUILD_SHARED_LIBS=ON in a
            'shared' subdirectory and OFF in a 'static' subdirectory.

    Raises:
        ValueError: if shared_and_static is set but the CMake arguments
            already specify -DBUILD_SHARED_LIBS.
    """
    # Fix for mutable default arguments: materialize the list defaults here.
    extra_args = [] if extra_args is None else extra_args
    extra_build_tool_args = (
        [] if extra_build_tool_args is None else extra_build_tool_args)
    install_targets = ['install'] if install_targets is None else install_targets

    build_tool = 'make'
    if use_ninja_if_available:
        ninja_available = is_ninja_available()
        log('Ninja is %s', 'available' if ninja_available else 'unavailable')
        if ninja_available:
            build_tool = 'ninja'

    log("Building dependency %s using CMake. Build tool: %s", dep, build_tool)
    log_prefix = self.log_prefix(dep)
    os.environ["YB_REMOTE_COMPILATION"] = "0"

    # Start from a clean CMake state in the current build directory.
    remove_path('CMakeCache.txt')
    remove_path('CMakeFiles')

    src_path = self.fs_layout.get_source_path(dep)
    if src_subdir_name is not None:
        src_path = os.path.join(src_path, src_subdir_name)

    args = ['cmake', src_path]
    if build_tool == 'ninja':
        args += ['-G', 'Ninja']
    args += self.get_common_cmake_flag_args(dep)
    # (Removed redundant 'is not None' check -- extra_args is always a list here.)
    args += extra_args
    args += dep.get_additional_cmake_args(self)

    if shared_and_static and any(
            arg.startswith('-DBUILD_SHARED_LIBS=') for arg in args):
        raise ValueError(
            "shared_and_static=True is specified but CMake arguments already mention "
            "-DBUILD_SHARED_LIBS: %s" % args)

    if '-DBUILD_SHARED_LIBS=OFF' not in args and not shared_and_static:
        # TODO: a better approach for setting CMake arguments from multiple places.
        args.append('-DBUILD_SHARED_LIBS=ON')

    def build_internal(even_more_cmake_args: Optional[List[str]] = None) -> None:
        """Run CMake configure + build (+ optional install) in the current dir."""
        final_cmake_args = args + (even_more_cmake_args or [])
        log("CMake command line (one argument per line):\n%s" %
            "\n".join([(" " * 4 + sanitize_flags_line_for_log(line))
                       for line in final_cmake_args]))
        log_output(log_prefix, final_cmake_args)

        if build_tool == 'ninja':
            dep.postprocess_ninja_build_file(self, 'build.ninja')

        build_tool_cmd = [
            build_tool, '-j{}'.format(get_make_parallelism())
        ] + extra_build_tool_args
        log_output(log_prefix, build_tool_cmd)

        if should_install:
            log_output(log_prefix, [build_tool] + install_targets)

        # Verify that sanitizer flags made it into every compile command.
        with open('compile_commands.json') as compile_commands_file:
            compile_commands = json.load(compile_commands_file)
        for command_item in compile_commands:
            command_args = command_item['command'].split()
            if self.build_type == BUILD_TYPE_ASAN:
                assert_list_contains(command_args, '-fsanitize=address')
                assert_list_contains(command_args, '-fsanitize=undefined')
            if self.build_type == BUILD_TYPE_TSAN:
                assert_list_contains(command_args, '-fsanitize=thread')

    if shared_and_static:
        for build_shared_libs_value, subdir_name in (
                ('ON', 'shared'), ('OFF', 'static')):
            build_dir = os.path.join(os.getcwd(), subdir_name)
            mkdir_if_missing(build_dir)
            build_shared_libs_cmake_arg = (
                '-DBUILD_SHARED_LIBS=%s' % build_shared_libs_value)
            log("Building dependency '%s' for build type '%s' with option: %s",
                dep.name, self.build_type, build_shared_libs_cmake_arg)
            with PushDir(build_dir):
                build_internal([build_shared_libs_cmake_arg])
    else:
        build_internal()
def init_linux_clang1x_flags(self, dep: Dependency) -> None:
    """
    Flags for Clang 10 and beyond. We are using LLVM-supplied libunwind and
    compiler-rt in this configuration.
    """
    self.ld_flags.append('-rtlib=compiler-rt')

    if self.build_type == BUILD_TYPE_COMMON:
        log("Not configuring any special Clang 10+ flags for build type %s",
            self.build_type)
        return

    # TODO mbautin: refactor to polymorphism
    is_libcxxabi = dep.name.endswith('_libcxxabi')
    is_libcxx = dep.name.endswith('_libcxx')
    log("Dependency name: %s, is_libcxxabi: %s, is_libcxx: %s",
        dep.name, is_libcxxabi, is_libcxx)

    if self.build_type == BUILD_TYPE_ASAN:
        self.compiler_flags.append('-shared-libasan')

        if is_libcxxabi:
            # To avoid an infinite loop in UBSAN.
            # https://monorail-prod.appspot.com/p/chromium/issues/detail?id=609786
            # This comment:
            # https://gist.githubusercontent.com/mbautin/ad9ea4715669da3b3a5fb9495659c4a9/raw
            self.compiler_flags.append('-fno-sanitize=vptr')

        # TODO mbautin: a centralized way to find paths inside LLVM installation.
        assert self.compiler_choice.cc is not None
        # <cc dir>/../lib/clang holds one subdirectory per LLVM version.
        compiler_rt_lib_dir_ancestor = os.path.join(
            os.path.dirname(
                os.path.dirname(os.path.realpath(self.compiler_choice.cc))),
            'lib', 'clang')
        compiler_rt_lib_dir_candidates = []
        nonexistent_compiler_rt_lib_dirs = []
        for llvm_version_subdir in os.listdir(compiler_rt_lib_dir_ancestor):
            compiler_rt_lib_dir = os.path.join(
                compiler_rt_lib_dir_ancestor, llvm_version_subdir, 'lib', 'linux')
            if os.path.isdir(compiler_rt_lib_dir):
                compiler_rt_lib_dir_candidates.append(compiler_rt_lib_dir)
            else:
                nonexistent_compiler_rt_lib_dirs.append(compiler_rt_lib_dir)
        if len(compiler_rt_lib_dir_candidates) != 1:
            if not compiler_rt_lib_dir_candidates:
                raise IOError(
                    "Could not find the compiler-rt library directory, looked at: %s" %
                    nonexistent_compiler_rt_lib_dirs)
            raise IOError(
                "Multiple possible compiler-rt library directories: %s" %
                compiler_rt_lib_dir_candidates)
        assert len(compiler_rt_lib_dir_candidates) == 1
        compiler_rt_lib_dir = compiler_rt_lib_dir_candidates[0]
        if not os.path.isdir(compiler_rt_lib_dir):
            # Bug fix: the path was previously passed as a second exception
            # argument instead of being %-formatted into the message.
            raise IOError("Directory does not exist: %s" % compiler_rt_lib_dir)
        self.add_lib_dir_and_rpath(compiler_rt_lib_dir)
        self.ld_flags.append('-lclang_rt.ubsan_minimal-x86_64')

    self.ld_flags += ['-lunwind']

    libcxx_installed_include, libcxx_installed_lib = self.get_libcxx_dirs(
        self.build_type)
    log("libc++ include directory: %s", libcxx_installed_include)
    log("libc++ library directory: %s", libcxx_installed_lib)

    if not is_libcxx and not is_libcxxabi:
        log("Adding special compiler/linker flags for Clang 10+ for dependencies other than "
            "libc++")
        self.ld_flags += ['-lc++', '-lc++abi']

        self.cxx_flags = [
            '-stdlib=libc++',
            '-isystem',
            libcxx_installed_include,
            '-nostdinc++'
        ] + self.cxx_flags
        self.prepend_lib_dir_and_rpath(libcxx_installed_lib)

    if is_libcxx:
        log("Adding special compiler/linker flags for Clang 10 or newer for libc++")
        # This is needed for libc++ to find libc++abi headers.
        assert_dir_exists(libcxx_installed_include)
        self.cxx_flags.append('-I%s' % libcxx_installed_include)

    if is_libcxx or is_libcxxabi:
        log("Adding special linker flags for Clang 10 or newer for libc++ or libc++abi")
        # libc++abi needs to be able to find libcxx at runtime, even though it can't always find
        # it at build time because libc++abi is built first.
        self.add_rpath(libcxx_installed_lib)

    self.cxx_flags.append('-Wno-error=unused-command-line-argument')
    log("Flags after the end of setup for Clang 10 or newer:")
    log("cxx_flags : %s", self.cxx_flags)
    log("c_flags : %s", self.c_flags)
    log("ld_flags : %s", self.ld_flags)