def copy_code_to(dest_dir: str) -> None:
    parent_dir = os.path.dirname(dest_dir)
    assert os.path.isdir(parent_dir), 'Directory %s does not exist' % parent_dir
    assert not os.path.exists(dest_dir), 'Already exists: %s' % dest_dir
    with PushDir(YB_THIRDPARTY_DIR):
        current_branch_name = get_current_git_branch_name()
        log_and_run_cmd(['git', 'clone', YB_THIRDPARTY_DIR, dest_dir])
        with PushDir(dest_dir):
            log_and_run_cmd(['git', 'checkout', current_branch_name])
        rsync_code_to(dest_dir)
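
# Hypothetical usage sketch, not part of the original module: copy_code_to() requires the
# parent of dest_dir to exist and dest_dir itself to be absent. The destination path below
# is an assumed example, not a path the build actually uses.
def _example_copy_code_to() -> None:
    dest_dir = '/tmp/yb-thirdparty-copy'  # assumed example path
    assert not os.path.exists(dest_dir)
    # Clones the repo, checks out the current branch, then rsyncs uncommitted changes on top.
    copy_code_to(dest_dir)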

def build_with_configure(
        self,
        log_prefix: str,
        extra_args: List[str] = [],
        configure_cmd: List[str] = ['./configure'],
        install: List[str] = ['install'],
        run_autogen: bool = False,
        autoconf: bool = False,
        src_subdir_name: Optional[str] = None) -> None:
    os.environ["YB_REMOTE_COMPILATION"] = "0"
    dir_for_build = os.getcwd()
    if src_subdir_name:
        dir_for_build = os.path.join(dir_for_build, src_subdir_name)

    with PushDir(dir_for_build):
        log("Building in %s", dir_for_build)
        if run_autogen:
            log_output(log_prefix, ['./autogen.sh'])
        if autoconf:
            log_output(log_prefix, ['autoreconf', '-i'])

        configure_args = (
            configure_cmd.copy() + ['--prefix={}'.format(self.prefix)] + extra_args)
        log_output(log_prefix, configure_args)
        log_output(log_prefix, ['make', '-j{}'.format(get_make_parallelism())])
        if install:
            log_output(log_prefix, ['make'] + install)
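
# A minimal sketch (hypothetical, not from the original source) of how a Dependency's
# build() hook might call build_with_configure(); the extra ./configure flag is an
# assumed example.
def _example_build_with_configure(builder, dep) -> None:
    builder.build_with_configure(
        log_prefix=builder.log_prefix(dep),
        extra_args=['--with-pic'],  # hypothetical extra ./configure argument
        run_autogen=False)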

def get_build_stamp_for_dependency(self, dep: Dependency) -> str:
    module_name = dep.__class__.__module__
    assert isinstance(module_name, str), \
        "Dependency's module is not a string: %s" % module_name
    assert module_name.startswith('build_definitions.'), \
        "Invalid module name: %s" % module_name
    module_name_components = module_name.split('.')
    assert len(module_name_components) == 2, (
        "Expected two components: %s" % module_name_components)
    module_name_final = module_name_components[-1]
    input_files_for_stamp = [
        'python/yugabyte_db_thirdparty/yb_build_thirdparty_main.py',
        'build_thirdparty.sh',
        os.path.join('python', 'build_definitions', '%s.py' % module_name_final)
    ]

    for path in input_files_for_stamp:
        abs_path = os.path.join(YB_THIRDPARTY_DIR, path)
        if not os.path.exists(abs_path):
            fatal("File '%s' does not exist -- expecting it to exist when creating a 'stamp' "
                  "for the build configuration of '%s'.", abs_path, dep.name)

    with PushDir(YB_THIRDPARTY_DIR):
        git_commit_sha1 = subprocess.check_output(
            ['git', 'log', '--pretty=%H', '-n', '1'] + input_files_for_stamp
        ).strip().decode('utf-8')
        build_stamp = 'git_commit_sha1={}\n'.format(git_commit_sha1)
        for git_extra_arg in (None, '--cached'):
            git_extra_args = [git_extra_arg] if git_extra_arg else []
            git_diff = subprocess.check_output(
                ['git', 'diff'] + git_extra_args + input_files_for_stamp)
            git_diff_sha256 = hashlib.sha256(git_diff).hexdigest()
            build_stamp += 'git_diff_sha256{}={}\n'.format(
                '_'.join(git_extra_args).replace('--', '_'),
                git_diff_sha256)

    return build_stamp
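
# Standalone sketch of the same stamping idea, under the assumption that it runs inside a
# git checkout and that input_files is an illustrative list: hash the last commit touching
# the files plus the sha256 of their unstaged and staged diffs. Note how the '--cached'
# argument turns into the '_cached' key suffix, matching the function above.
def _example_stamp(input_files: List[str]) -> str:
    commit = subprocess.check_output(
        ['git', 'log', '--pretty=%H', '-n', '1'] + input_files).strip().decode('utf-8')
    stamp = 'git_commit_sha1=%s\n' % commit
    for extra_args, suffix in (([], ''), (['--cached'], '_cached')):
        diff = subprocess.check_output(['git', 'diff'] + extra_args + input_files)
        stamp += 'git_diff_sha256%s=%s\n' % (suffix, hashlib.sha256(diff).hexdigest())
    return stamp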

def build_dependency(self, dep: Dependency) -> None:
    self.init_flags(dep)

    # This is needed at least for glog to be able to find gflags.
    self.add_rpath(os.path.join(self.fs_layout.tp_installed_dir, self.build_type, 'lib'))
    if self.build_type != BUILD_TYPE_COMMON:
        # Needed to find libunwind for Clang 10 when using compiler-rt.
        self.add_rpath(
            os.path.join(self.fs_layout.tp_installed_dir, BUILD_TYPE_COMMON, 'lib'))

    if self.args.download_extract_only:
        log("Skipping build of dependency %s, build type %s, --download-extract-only is "
            "specified.", dep.name, self.build_type)
        return

    env_vars: Dict[str, Optional[str]] = {
        "CPPFLAGS": " ".join(self.preprocessor_flags)
    }
    log_and_set_env_var_to_list(env_vars, 'CXXFLAGS', self.get_effective_cxx_flags(dep))
    log_and_set_env_var_to_list(env_vars, 'CFLAGS', self.get_effective_c_flags(dep))
    log_and_set_env_var_to_list(env_vars, 'LDFLAGS', self.get_effective_ld_flags(dep))
    log_and_set_env_var_to_list(env_vars, 'LIBS', self.libs)
    log_and_set_env_var_to_list(
        env_vars, 'CPPFLAGS', self.get_effective_preprocessor_flags(dep))

    if self.build_type == BUILD_TYPE_ASAN:
        # To avoid errors similar to:
        # https://gist.githubusercontent.com/mbautin/4b8eec566f54bcc35706dcd97cab1a95/raw
        #
        # This could also be fixed to some extent by the compiler flags
        #   -mllvm -asan-use-private-alias=1
        # but applying that flag to all builds is complicated in practice and is probably
        # best done using a compiler wrapper script, which would slow things down.
        env_vars["ASAN_OPTIONS"] = "detect_odr_violation=0"

    with PushDir(self.create_build_dir_and_prepare(dep)):
        with EnvVarContext(**env_vars):
            write_env_vars('yb_dependency_env.sh')
            dep.build(self)
    self.save_build_stamp_for_dependency(dep)
    log("")
    log("Finished building %s (%s)", dep.name, self.build_type)
    log("")
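
# Sketch of what an environment-variable context manager like EnvVarContext (used above)
# typically does; this is an assumed illustration based on how it is called here, not the
# module's actual implementation.
class _ExampleEnvVarContext:
    def __init__(self, **env_vars) -> None:
        self.env_vars = env_vars
        self.saved: Dict[str, Optional[str]] = {}

    def __enter__(self) -> None:
        # Save previous values, then apply the new ones (None means "unset").
        for name, value in self.env_vars.items():
            self.saved[name] = os.environ.get(name)
            if value is None:
                os.environ.pop(name, None)
            else:
                os.environ[name] = value

    def __exit__(self, *exc_info) -> None:
        # Restore the environment exactly as it was on entry.
        for name, old_value in self.saved.items():
            if old_value is None:
                os.environ.pop(name, None)
            else:
                os.environ[name] = old_value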

def build(self) -> BuildResult:
    with PushDir(self.conf_run_dir):
        home_dir_in_container = '/home/yugabyteci'
        pip_cache_dir_in_container = os.path.join(home_dir_in_container, '.cache', 'pip')
        readonly_checkout_in_container = '/opt/yb-build/readonly-code/yugabyte-db-thirdparty'
        rw_checkout_in_container = '/opt/yb-build/thirdparty/checkout'
        sudo_cmd = 'sudo -u yugabyteci '
        # TODO: create a shell script in the checkout directory outside container with all the
        # right settings so we can rerun it manually easily if needed.
        bash_script = '; '.join([
            'set -euxo pipefail',
            'mkdir -p /root/.cache/pip',
            'chmod a+rX /root',
            'chmod a+rX /root/.cache',
            'chmod -R a+rX /root/.cache/pip',
            # Here, before we switch user to yugabyteci, we can do things as root if needed.
            'sudo -u yugabyteci /bin/bash -c ' + shlex.quote('; '.join([
                'set -euxo pipefail',
                f'mkdir -p {pip_cache_dir_in_container}',
                f'export PIP_DOWNLOAD_CACHE={pip_cache_dir_in_container}',
                f"export YB_THIRDPARTY_ARCHIVE_NAME_SUFFIX={self.archive_name_suffix}",
                f"export YB_BUILD_THIRDPARTY_ARGS='{self.build_thirdparty_args}'",
                f"cp -R {readonly_checkout_in_container} {rw_checkout_in_container}",
                f"cd {rw_checkout_in_container}",
                "./build_and_release.sh"
            ]))
        ])
        container_name = f'{self.name}-{self.common_conf.timestamp_str}'
        docker_run_cmd_args = [
            'docker',
            'run',
            '--name',
            container_name,
            '--cap-add=SYS_PTRACE',
            '--mount',
            ','.join([
                'type=bind',
                f'source={self.common_conf.checkout_dir}',
                f'target={readonly_checkout_in_container}',
                'readonly',
            ]),
            self.docker_image,
            'bash',
            '-c',
            bash_script
        ]
        self.log_with_prefix("Running command: %s", shlex_join(docker_run_cmd_args))
        self.log_with_prefix("Logging to: %s", self.output_file_path)
        start_time_sec = time.time()
        with open(self.output_file_path, 'wb') as output_file:
            docker_run_process = subprocess.Popen(
                docker_run_cmd_args, stdout=output_file, stderr=subprocess.STDOUT)
            docker_run_process.wait()
            elapsed_time_sec = time.time() - start_time_sec
            self.log_with_prefix(
                "Return code: %d, elapsed time: %.1f sec",
                docker_run_process.returncode,
                elapsed_time_sec)
    return BuildResult()
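
# Small sketch of composing a read-only bind-mount argument for 'docker run', as done
# above; the directory values are assumed examples. The checkout is mounted read-only and
# copied to a writable location inside the container before building.
def _example_bind_mount_arg(host_dir: str, container_dir: str) -> str:
    return ','.join(
        ['type=bind', 'source=%s' % host_dir, 'target=%s' % container_dir, 'readonly'])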

def rsync_code_to(rsync_dest: str) -> None:
    with PushDir(YB_THIRDPARTY_DIR):
        excluded_files_bytes: bytes = subprocess.check_output(
            ['git', 'ls-files', '--exclude-standard', '-oi', '--directory'])
        assert os.path.isdir('.git')
        excluded_files_path = os.path.join(os.getcwd(), '.git', 'ignores.tmp')
        with open(excluded_files_path, 'wb') as excluded_files_file:
            excluded_files_file.write(excluded_files_bytes)

        rsync_cmd = [
            'rsync',
            '--archive',
            '--verbose',
            '--human-readable',
            '--delete',
            '--checksum',
            '--exclude', '.git',
            '--exclude-from=%s' % excluded_files_path,
            '.',
            rsync_dest
        ]
        log_and_run_cmd(rsync_cmd)
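
# Hypothetical usage sketch: rsync_code_to() accepts anything rsync accepts as a
# destination, e.g. a local directory or an scp-style remote spec. The host and path below
# are assumed examples.
def _example_rsync_code_to() -> None:
    rsync_code_to('buildhost:/opt/yb-build/thirdparty/checkout')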

def build_with_cmake(
        self,
        dep: Dependency,
        extra_args: List[str] = [],
        use_ninja_if_available: bool = True,
        src_subdir_name: Optional[str] = None,
        extra_build_tool_args: List[str] = [],
        should_install: bool = True,
        install_targets: List[str] = ['install'],
        shared_and_static: bool = False) -> None:
    build_tool = 'make'
    if use_ninja_if_available:
        ninja_available = is_ninja_available()
        log('Ninja is %s', 'available' if ninja_available else 'unavailable')
        if ninja_available:
            build_tool = 'ninja'

    log("Building dependency %s using CMake. Build tool: %s", dep, build_tool)
    log_prefix = self.log_prefix(dep)
    os.environ["YB_REMOTE_COMPILATION"] = "0"

    remove_path('CMakeCache.txt')
    remove_path('CMakeFiles')

    src_path = self.fs_layout.get_source_path(dep)
    if src_subdir_name is not None:
        src_path = os.path.join(src_path, src_subdir_name)

    args = ['cmake', src_path]
    if build_tool == 'ninja':
        args += ['-G', 'Ninja']
    args += self.get_common_cmake_flag_args(dep)
    if extra_args is not None:
        args += extra_args
    args += dep.get_additional_cmake_args(self)

    if shared_and_static and any(arg.startswith('-DBUILD_SHARED_LIBS=') for arg in args):
        raise ValueError(
            "shared_and_static=True is specified but CMake arguments already mention "
            "-DBUILD_SHARED_LIBS: %s" % args)

    if '-DBUILD_SHARED_LIBS=OFF' not in args and not shared_and_static:
        # TODO: a better approach for setting CMake arguments from multiple places.
        args.append('-DBUILD_SHARED_LIBS=ON')

    def build_internal(even_more_cmake_args: List[str] = []) -> None:
        final_cmake_args = args + even_more_cmake_args
        log("CMake command line (one argument per line):\n%s" %
            "\n".join([(" " * 4 + sanitize_flags_line_for_log(line))
                       for line in final_cmake_args]))
        log_output(log_prefix, final_cmake_args)

        if build_tool == 'ninja':
            dep.postprocess_ninja_build_file(self, 'build.ninja')

        build_tool_cmd = [
            build_tool, '-j{}'.format(get_make_parallelism())
        ] + extra_build_tool_args
        log_output(log_prefix, build_tool_cmd)

        if should_install:
            log_output(log_prefix, [build_tool] + install_targets)

        with open('compile_commands.json') as compile_commands_file:
            compile_commands = json.load(compile_commands_file)
        for command_item in compile_commands:
            command_args = command_item['command'].split()
            if self.build_type == BUILD_TYPE_ASAN:
                assert_list_contains(command_args, '-fsanitize=address')
                assert_list_contains(command_args, '-fsanitize=undefined')
            if self.build_type == BUILD_TYPE_TSAN:
                assert_list_contains(command_args, '-fsanitize=thread')

    if shared_and_static:
        for build_shared_libs_value, subdir_name in (('ON', 'shared'), ('OFF', 'static')):
            build_dir = os.path.join(os.getcwd(), subdir_name)
            mkdir_if_missing(build_dir)
            build_shared_libs_cmake_arg = '-DBUILD_SHARED_LIBS=%s' % build_shared_libs_value
            log("Building dependency '%s' for build type '%s' with option: %s",
                dep.name, self.build_type, build_shared_libs_cmake_arg)
            with PushDir(build_dir):
                build_internal([build_shared_libs_cmake_arg])
    else:
        build_internal()
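
# Minimal sketch (hypothetical, not from the original source) of a Dependency build() hook
# that builds both shared and static variants; the extra CMake option is an assumed
# example. With shared_and_static=True, the function above runs CMake twice, in 'shared'
# and 'static' build subdirectories, toggling -DBUILD_SHARED_LIBS itself.
def _example_build_with_cmake(builder, dep) -> None:
    builder.build_with_cmake(
        dep,
        extra_args=['-DCMAKE_POSITION_INDEPENDENT_CODE=ON'],  # assumed example option
        shared_and_static=True)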

def build_remotely(remote_server: str, remote_build_code_path: str) -> None:
    assert remote_server is not None
    assert remote_build_code_path is not None
    assert remote_build_code_path.startswith('/')

    def run_ssh_cmd(ssh_args: List[str]) -> None:
        log_and_run_cmd(['ssh', remote_server] + ssh_args)

    def run_remote_bash_script(bash_script: str) -> None:
        bash_script = bash_script.strip()
        log("Running script remotely: %s", bash_script)
        # shlex.quote is needed because ssh joins its remote-command arguments with spaces
        # and hands the result to the remote login shell for evaluation; quoting keeps the
        # entire script as the single argument of the remote "bash -c".
        run_ssh_cmd(['bash', '-c', shlex.quote(bash_script)])

    quoted_remote_path = shlex.quote(remote_build_code_path)

    # Ensure the remote directory exists. We do not attempt to create it if it is missing.
    run_remote_bash_script('[[ -d %s ]]' % quoted_remote_path)

    with PushDir(YB_THIRDPARTY_DIR):
        local_branch_name = get_current_git_branch_name()

        local_git_remotes = subprocess.check_output(
            shlex.split('git remote -v')).decode('utf-8')

        remote_url = '%s:%s' % (remote_server, remote_build_code_path)
        preferred_remote_name = 'remote-build-%s' % remote_server
        remote_name = None
        for remote_line in local_git_remotes.split('\n'):
            remote_line = remote_line.strip()
            if not remote_line:
                continue
            remote_components = remote_line.split('\t')
            if remote_components[1].endswith(' (push)'):
                parsed_remote_url = remote_components[1][:-7].strip()
                if parsed_remote_url == remote_url:
                    remote_name = remote_components[0]
                    log("Found existing remote %s for %s", remote_name, remote_url)
                    break
        if remote_name is None:
            log_and_run_cmd(['git', 'remote', 'add', preferred_remote_name, remote_url])
            remote_name = preferred_remote_name

        log("Local branch name: %s, checking it out remotely", local_branch_name)
        run_remote_bash_script(f"""
            set -euo pipefail
            cd {quoted_remote_path}
            git reset --hard HEAD
            git clean -df
            git checkout master
        """)
        log_and_run_cmd([
            'git', 'push', '--force', remote_name,
            '%s:%s' % (local_branch_name, local_branch_name)
        ])
        run_remote_bash_script(
            'cd %s && git checkout %s' % (quoted_remote_path, shlex.quote(local_branch_name)))

        rsync_code_to('%s:%s' % (remote_server, remote_build_code_path))
        remote_bash_script = 'cd %s && ./build_thirdparty.sh %s' % (
            quoted_remote_path, shlex_join(sys.argv[1:]))
        run_remote_bash_script(remote_bash_script)
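
# For illustration of the quoting behavior discussed above: without shlex.quote, the
# semicolons in the script would be interpreted by the remote login shell instead of by
# the inner 'bash -c'. The host name is an assumed example.
def _example_remote_quoting() -> None:
    script = 'set -euo pipefail; cd /tmp; ls'
    # Prints the argv that keeps the whole script as one remote argument.
    print(['ssh', 'buildhost', 'bash', '-c', shlex.quote(script)])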

def extract_archive(
        self,
        archive_file_name: str,
        out_dir: str,
        out_name: Optional[str] = None) -> None:
    """
    Extract the given archive into a subdirectory of out_dir, optionally renaming it to
    the specified name out_name. The archive is expected to contain exactly one directory.
    If out_name is not specified, the name of the directory inside the archive becomes
    the name of the destination directory. out_dir is the parent directory that should
    contain the extracted directory when the function returns.
    """

    def dest_dir_already_exists(full_out_path: str) -> bool:
        if os.path.exists(full_out_path):
            log("Directory already exists: %s, skipping extracting %s" % (
                full_out_path, archive_file_name))
            return True
        return False

    full_out_path = None
    if out_name:
        full_out_path = os.path.join(out_dir, out_name)
        if dest_dir_already_exists(full_out_path):
            return

    # Extract the archive into a temporary directory.
    tmp_out_dir = os.path.join(
        out_dir, 'tmp-extract-%s-%s' % (
            os.path.basename(archive_file_name),
            get_temporal_randomized_file_name_suffix()
        ))
    if os.path.exists(tmp_out_dir):
        raise IOError("Just-generated unique directory name already exists: %s" % tmp_out_dir)
    os.makedirs(tmp_out_dir)

    archive_extension = None
    for ext in ARCHIVE_TYPES:
        if archive_file_name.endswith(ext):
            archive_extension = ext
            break
    if not archive_extension:
        fatal("Unknown archive type for: {}".format(archive_file_name))
    assert archive_extension is not None

    try:
        with PushDir(tmp_out_dir):
            cmd = ARCHIVE_TYPES[archive_extension].format(archive_file_name)
            log("Extracting %s in temporary directory %s", cmd, tmp_out_dir)
            subprocess.check_call(cmd, shell=True)
        extracted_subdirs = [
            subdir_name for subdir_name in os.listdir(tmp_out_dir)
            if not subdir_name.startswith('.')
        ]
        if len(extracted_subdirs) != 1:
            raise IOError(
                "Expected the extracted archive %s to contain exactly one "
                "subdirectory and no files, found: %s" % (
                    archive_file_name, extracted_subdirs))
        extracted_subdir_basename = extracted_subdirs[0]
        extracted_subdir_path = os.path.join(tmp_out_dir, extracted_subdir_basename)
        if not os.path.isdir(extracted_subdir_path):
            raise IOError(
                "This is a file, expected it to be a directory: %s" % extracted_subdir_path)

        if not full_out_path:
            full_out_path = os.path.join(out_dir, extracted_subdir_basename)
            if dest_dir_already_exists(full_out_path):
                return

        log("Moving %s to %s", extracted_subdir_path, full_out_path)
        shutil.move(extracted_subdir_path, full_out_path)
    finally:
        log("Removing temporary directory: %s", tmp_out_dir)
        shutil.rmtree(tmp_out_dir)
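
# Hypothetical usage sketch: extracting an archive that contains a single top-level
# directory and renaming that directory to 'mylib-1.2.3'. All paths and names are assumed
# examples, not values from the original source.
def _example_extract_archive(builder) -> None:
    builder.extract_archive(
        archive_file_name='/tmp/downloads/mylib-1.2.3.tar.gz',
        out_dir='/tmp/src',
        out_name='mylib-1.2.3')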

def download_dependency(
        self,
        dep: Dependency,
        src_path: str,
        archive_path: Optional[str]) -> None:
    patch_level_path = os.path.join(src_path, 'patchlevel-{}'.format(dep.patch_version))
    if os.path.exists(patch_level_path):
        return

    download_url = dep.download_url
    assert download_url is not None, \
        "Download URL not specified for dependency %s" % dep.name

    remove_path(src_path)

    # If download_url is "mkdir", we just create an empty directory with the specified name.
    if download_url != 'mkdir':
        if archive_path is None:
            return
        self.ensure_file_downloaded(
            url=download_url,
            file_path=archive_path,
            enable_using_alternative_url=True)
        self.extract_archive(
            archive_path,
            os.path.dirname(src_path),
            os.path.basename(src_path))
    else:
        log("Creating %s", src_path)
        mkdir_if_missing(src_path)

    if hasattr(dep, 'extra_downloads'):
        for extra in dep.extra_downloads:
            assert extra.archive_name is not None
            archive_path = os.path.join(self.download_dir, extra.archive_name)
            log("Downloading %s from %s", extra.archive_name, extra.download_url)
            self.ensure_file_downloaded(
                url=extra.download_url,
                file_path=archive_path,
                enable_using_alternative_url=True)
            output_path = os.path.join(src_path, extra.dir_name)
            self.extract_archive(archive_path, output_path)
            if extra.post_exec is not None:
                with PushDir(output_path):
                    assert isinstance(extra.post_exec, list)
                    if isinstance(extra.post_exec[0], str):
                        # post_exec is a single command given as a list of arguments.
                        subprocess.check_call(cast(List[str], extra.post_exec))
                    else:
                        # post_exec is a list of commands.
                        for command in extra.post_exec:
                            subprocess.check_call(command)

    if hasattr(dep, 'patches'):
        with PushDir(src_path):
            for patch in dep.patches:
                log("Applying patch: %s", patch)
                process = subprocess.Popen(
                    ['patch', '-p{}'.format(dep.patch_strip)], stdin=subprocess.PIPE)
                with open(os.path.join(YB_THIRDPARTY_DIR, 'patches', patch), 'rt') as inp:
                    patch_contents = inp.read()
                assert process.stdin is not None
                process.stdin.write(patch_contents.encode('utf-8'))
                process.stdin.close()
                exit_code = process.wait()
                if exit_code:
                    fatal("Patch %s for dependency %s failed with exit code %d",
                          patch, dep.name, exit_code)
            if dep.post_patch:
                subprocess.check_call(dep.post_patch)

    with open(patch_level_path, 'wb') as out:
        # Just create an empty marker file recording the applied patch level.
        pass
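
# For reference, the 'patchlevel-N' marker file created above makes download_dependency()
# idempotent: a later invocation for the same dependency and patch_version returns early.
# Hypothetical sketch of forcing a re-download by removing the marker; the parameters are
# assumed examples.
def _example_force_redownload(src_path: str, patch_version: int) -> None:
    marker = os.path.join(src_path, 'patchlevel-{}'.format(patch_version))
    if os.path.exists(marker):
        os.remove(marker)  # the next download_dependency() call will re-fetch and re-patch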