def sync_postgres_source(self):
    """Mirror the PostgreSQL source tree into the build directory via rsync.

    A first pass (with --delete) keeps only the recognized source file types
    and removes build-tree copies of files deleted from the source tree; a
    second pass copies everything else while skipping editor/backup leftovers.
    """
    logging.info("Syncing postgres source code")
    # Remove source code files from the build directory that have been removed from the
    # source directory.
    # TODO: extend this to the complete list of source file types.
    include_args = []
    for pattern in ['*.c', '*.h', 'Makefile', '*.am', '*.in', '*.mk']:
        include_args += ['--include', pattern]
    src_dir_with_slash = self.postgres_src_dir + '/'
    run_program(
        ['rsync', '-avz'] + include_args +
        ['--exclude', '*', '--delete', src_dir_with_slash, self.pg_build_root],
        capture_output=False)
    # Second pass: copy the remaining files, skipping vim swap files and
    # patch/backup droppings.
    exclude_args = []
    for pattern in ['*.sw?', '*.bak', '*.orig', '*.rej']:
        exclude_args += ['--exclude', pattern]
    run_program(
        ['rsync', '-az'] + exclude_args +
        [src_dir_with_slash, self.pg_build_root],
        capture_output=False)
    if is_verbose_mode():
        logging.info("Successfully synced postgres source code")
def configure_postgres(self):
    """Run PostgreSQL's ./configure in the build directory.

    Retries once when the failure is caused by a stale config.cache;
    raises RuntimeError on any other configure failure.
    """
    if is_verbose_mode():
        logging.info("Running configure in the postgres build directory")
    # Don't enable -Werror when running configure -- that can affect the resulting
    # configuration.
    self.set_env_vars('configure')
    configure_cmd_line = [
        './configure',
        '--prefix', self.pg_prefix,
        '--enable-depend',
        # We're enabling debug symbols for all types of builds.
        '--enable-debug']
    if not get_bool_env_var('YB_NO_PG_CONFIG_CACHE'):
        configure_cmd_line.append('--config-cache')
    # We get readline-related errors in ASAN/TSAN, so let's disable readline there.
    if self.build_type in ['asan', 'tsan']:
        configure_cmd_line += ['--without-readline']
    if self.build_type != 'release':
        configure_cmd_line += ['--enable-cassert']
    configure_result = run_program(configure_cmd_line, error_ok=True)
    if configure_result.failure():
        rerun_configure = False
        # A stale config.cache is the one recoverable failure: delete the
        # cache and run configure again.
        for line in configure_result.stderr.splitlines():
            if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                logging.info("Configure failed because of stale config.cache, re-running.")
                run_program('rm -f config.cache')
                rerun_configure = True
                break
        if not rerun_configure:
            logging.error("Standard error from configure:\n" + configure_result.stderr)
            raise RuntimeError("configure failed")
        # Second (and final) attempt; failures here propagate from run_program.
        configure_result = run_program(configure_cmd_line)
    if is_verbose_mode():
        configure_result.print_output_to_stdout()
    write_program_output_to_file('configure', configure_result, self.pg_build_root)
    logging.info("Successfully ran configure in the postgres build directory")
def make_postgres(self):
    """Run 'make' and 'make install' in the PostgreSQL build directories.

    Also writes a make.sh helper script into each build directory so
    developers can re-run make by hand with the same environment.

    Raises:
        RuntimeError: if one of the required environment variables is unset.
    """
    self.set_env_vars('make')
    make_cmd = ['make']
    make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
    if make_parallelism:
        make_cmd += ['-j', str(int(make_parallelism))]
    # Create a script allowing to easily run "make" from the build directory with the right
    # environment.
    make_script_content = "#!/usr/bin/env bash\n"
    for env_var_name in [
            'YB_SRC_ROOT', 'YB_BUILD_ROOT', 'YB_BUILD_TYPE',
            'CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'PATH']:
        # Bug fix: os.environ[...] raises KeyError for a missing variable and
        # never returns None, so the original "is None" check was dead code.
        # Use .get() so a missing variable produces the intended RuntimeError.
        env_var_value = os.environ.get(env_var_name)
        if env_var_value is None:
            raise RuntimeError("Expected env var %s to be set" % env_var_name)
        make_script_content += "export %s=%s\n" % (
            env_var_name, quote_for_bash(env_var_value))
    make_script_content += 'make "$@"\n'
    for work_dir in [self.pg_build_root, os.path.join(self.pg_build_root, 'contrib')]:
        with WorkDirContext(work_dir):
            # Create a script to run Make easily with the right environment.
            make_script_path = 'make.sh'
            with open(make_script_path, 'w') as out_f:
                out_f.write(make_script_content)
            run_program(['chmod', 'u+x', make_script_path])
            # Actually run Make.
            if is_verbose_mode():
                logging.info("Running make in the %s directory", work_dir)
            make_result = run_program(make_cmd)
            write_program_output_to_file('make', make_result, work_dir)
            make_install_result = run_program(['make', 'install'])
            write_program_output_to_file('make_install', make_install_result, work_dir)
            logging.info("Successfully ran make in the %s directory", work_dir)
def set_env_vars(self, step):
    """Set up environment variables for the given PostgreSQL build step.

    step must be one of BUILD_STEPS. Adjusts compiler/linker flags and
    YB-specific variables; warnings are only promoted to errors during
    'make' (doing so during 'configure' could change detection results).

    Raises:
        RuntimeError: if step is not in BUILD_STEPS.
    """
    if step not in BUILD_STEPS:
        raise RuntimeError(
            ("Invalid step specified for setting env vars: must be in {}"
             ).format(BUILD_STEPS))
    is_make_step = step == 'make'
    self.set_env_var('YB_PG_BUILD_STEP', step)
    self.set_env_var('YB_THIRDPARTY_DIR', self.thirdparty_dir)
    self.set_env_var('YB_SRC_ROOT', YB_SRC_ROOT)
    # Propagate flags given as command-line arguments into the environment.
    for var_name in ['CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'LDFLAGS_EX']:
        arg_value = getattr(self.args, var_name.lower())
        self.set_env_var(var_name, arg_value)
    additional_c_cxx_flags = [
        '-Wimplicit-function-declaration',
        '-Wno-error=unused-function',
        '-DHAVE__BUILTIN_CONSTANT_P=1',
        '-DUSE_SSE42_CRC32C=1',
        '-std=c11',
        '-Werror=implicit-function-declaration',
        '-Werror=int-conversion',
    ]
    if self.compiler_type == 'clang':
        additional_c_cxx_flags += ['-Wno-builtin-requires-header']
    is_gcc = self.compiler_type.startswith('gcc')
    if is_make_step:
        # Warnings-as-errors only during 'make' (see docstring).
        additional_c_cxx_flags += [
            '-Wall',
            '-Werror',
            '-Wno-error=unused-function'
        ]
        if self.build_type == 'release':
            if self.compiler_type == 'clang':
                additional_c_cxx_flags += [
                    '-Wno-error=array-bounds',
                    '-Wno-error=gnu-designator'
                ]
            if is_gcc:
                additional_c_cxx_flags += ['-Wno-error=strict-overflow']
        if self.build_type == 'asan':
            additional_c_cxx_flags += [
                '-fsanitize-recover=signed-integer-overflow',
                '-fsanitize-recover=shift-base',
                '-fsanitize-recover=shift-exponent'
            ]
    # Tell gdb to pretend that we're compiling the code in the $YB_SRC_ROOT/src/postgres
    # directory.
    additional_c_cxx_flags += [
        '-fdebug-prefix-map=%s=%s' % (build_path, source_path)
        for build_path in get_absolute_path_aliases(self.pg_build_root)
        for source_path in get_absolute_path_aliases(self.postgres_src_dir)
    ]
    if is_gcc:
        additional_c_cxx_flags.append('-Wno-error=maybe-uninitialized')
    for var_name in ['CFLAGS', 'CXXFLAGS']:
        os.environ[var_name] = filter_compiler_flags(
            os.environ.get(var_name, '') + ' ' + ' '.join(additional_c_cxx_flags),
            step,
            language='c' if var_name == 'CFLAGS' else 'c++')
    if step == 'make':
        self.adjust_cflags_in_makefile()
    # MAKEFLAGS inherited from an outer make invocation could interfere with
    # the PostgreSQL build.
    for env_var_name in ['MAKEFLAGS']:
        if env_var_name in os.environ:
            del os.environ[env_var_name]
    compiler_wrappers_dir = os.path.join(YB_SRC_ROOT, 'build-support', 'compiler-wrappers')
    self.set_env_var('CC', os.path.join(compiler_wrappers_dir, 'cc'))
    self.set_env_var('CXX', os.path.join(compiler_wrappers_dir, 'c++'))
    # Strip strict linker flags that the PostgreSQL build does not tolerate.
    self.set_env_var(
        'LDFLAGS',
        re.sub(r'-Wl,--no-undefined', ' ', os.environ['LDFLAGS']))
    self.set_env_var(
        'LDFLAGS',
        re.sub(r'-Wl,--no-allow-shlib-undefined', ' ', os.environ['LDFLAGS']))
    self.append_to_env_var(
        'LDFLAGS',
        '-Wl,-rpath,' + os.path.join(self.build_root, 'lib'))
    # macOS links libm implicitly; elsewhere add it explicitly.
    if sys.platform != 'darwin':
        for ldflags_var_name in ['LDFLAGS', 'LDFLAGS_EX']:
            self.append_to_env_var(ldflags_var_name, '-lm')
    if is_verbose_mode():
        # CPPFLAGS are C preprocessor flags, CXXFLAGS are C++ flags.
        for env_var_name in [
                'CFLAGS', 'CXXFLAGS', 'CPPFLAGS', 'LDFLAGS', 'LDFLAGS_EX', 'LIBS'
        ]:
            if env_var_name in os.environ:
                logging.info("%s: %s", env_var_name, os.environ[env_var_name])
    self.remote_compilation_allowed = ALLOW_REMOTE_COMPILATION and is_make_step
    self.set_env_var(
        'YB_REMOTE_COMPILATION',
        '1' if (self.remote_compilation_allowed and
                self.build_uses_remote_compilation and
                step == 'make') else '0')
    self.set_env_var('YB_BUILD_TYPE', self.build_type)
    # Do not try to make rpaths relative during the configure step, as that may slow it down.
    self.set_env_var('YB_DISABLE_RELATIVE_RPATH', '1' if step == 'configure' else '0')
    # We need to add this directory to PATH so Postgres build could find Bison.
    thirdparty_installed_common_bin_path = os.path.join(
        self.thirdparty_dir, 'installed', 'common', 'bin')
    new_path_str = ':'.join([thirdparty_installed_common_bin_path] + self.original_path)
    os.environ['PATH'] = new_path_str
def set_env_vars(self, step):
    """Set environment variables for the given build step.

    step: either 'configure' or 'make'. Adjusts compiler and linker flags
    and YB-specific variables accordingly.

    Raises:
        RuntimeError: if step is not 'configure' or 'make'.
    """
    if step not in ['configure', 'make']:
        # Bug fix: the original message contained no '{}' placeholder, so
        # .format(step) silently dropped the offending value. Include it.
        raise RuntimeError(
            "Invalid step specified for setting env vars, must be either "
            "'configure' or 'make': {}".format(step))
    os.environ['YB_BUILD_ROOT'] = self.build_root
    os.environ['YB_SRC_ROOT'] = YB_SRC_ROOT
    for var_name in ['CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'LDFLAGS_EX']:
        arg_value = getattr(self.args, var_name.lower())
        if arg_value is not None:
            os.environ[var_name] = arg_value
        else:
            # Bug fix: a plain 'del os.environ[var_name]' raises KeyError
            # when the variable is not already set; pop() is idempotent.
            os.environ.pop(var_name, None)
    additional_c_cxx_flags = [
        '-Wimplicit-function-declaration',
        '-Wno-error=unused-function',
        '-DHAVE__BUILTIN_CONSTANT_P=1',
        '-DUSE_SSE42_CRC32C=1',
    ]
    # Tell gdb to pretend that we're compiling the code in the $YB_SRC_ROOT/src/postgres
    # directory.
    build_path_variants = get_path_variants(self.pg_build_root)
    src_path_variants = get_path_variants(self.postgres_src_dir)
    additional_c_cxx_flags += [
        '-fdebug-prefix-map=%s=%s' % (build_path, source_path)
        for build_path in build_path_variants
        for source_path in src_path_variants
    ]
    if self.compiler_type == 'gcc':
        additional_c_cxx_flags.append('-Wno-error=maybe-uninitialized')
    for var_name in ['CFLAGS', 'CXXFLAGS']:
        os.environ[var_name] = filter_compiler_flags(
            os.environ.get(var_name, '') + ' ' + ' '.join(additional_c_cxx_flags),
            allow_error_on_warning=(step == 'make'))
    if step == 'make':
        self.adjust_cflags_in_makefile()
    # MAKEFLAGS inherited from an outer make invocation could interfere with
    # the PostgreSQL build.
    for env_var_name in ['MAKEFLAGS']:
        if env_var_name in os.environ:
            del os.environ[env_var_name]
    compiler_wrappers_dir = os.path.join(YB_SRC_ROOT, 'build-support', 'compiler-wrappers')
    os.environ['CC'] = os.path.join(compiler_wrappers_dir, 'cc')
    os.environ['CXX'] = os.path.join(compiler_wrappers_dir, 'c++')
    # Strip strict linker flags that the PostgreSQL build does not tolerate.
    # NOTE(review): the lines below assume LDFLAGS is set; if args.ldflags
    # was None, LDFLAGS was removed above and this raises KeyError -- verify
    # that callers always pass ldflags.
    os.environ['LDFLAGS'] = re.sub(r'-Wl,--no-undefined', ' ', os.environ['LDFLAGS'])
    os.environ['LDFLAGS'] = re.sub(
        r'-Wl,--no-allow-shlib-undefined', ' ', os.environ['LDFLAGS'])
    os.environ['LDFLAGS'] += ' -Wl,-rpath,' + os.path.join(self.build_root, 'lib')
    for ldflags_var_name in ['LDFLAGS', 'LDFLAGS_EX']:
        os.environ[ldflags_var_name] += ' -lm'
    if is_verbose_mode():
        # CPPFLAGS are C preprocessor flags, CXXFLAGS are C++ flags.
        for env_var_name in ['CFLAGS', 'CXXFLAGS', 'CPPFLAGS', 'LDFLAGS', 'LDFLAGS_EX',
                             'LIBS']:
            if env_var_name in os.environ:
                logging.info("%s: %s", env_var_name, os.environ[env_var_name])
    # PostgreSQL builds pretty fast, and we don't want to use our remote compilation over SSH
    # for it as it might have issues with parallelism.
    os.environ['YB_NO_REMOTE_BUILD'] = '1'
    os.environ['YB_BUILD_TYPE'] = self.build_type
def make_postgres(self):
    """Run 'make' and 'make install' in the PostgreSQL build directories.

    Writes make.sh/env.sh helper scripts into each build directory, caps
    parallelism when compiling locally, and optionally generates a
    compilation database.

    Raises:
        RuntimeError: if a required env var is unset or the compilation
            database cannot be generated.
    """
    self.set_env_vars('make')
    # Postgresql requires MAKELEVEL to be 0 or non-set when calling its make.
    # But in case YB project is built with make, MAKELEVEL is not 0 at this point.
    make_cmd = ['make', 'MAKELEVEL=0']
    make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
    if make_parallelism:
        make_parallelism = int(make_parallelism)
    if self.build_uses_remote_compilation and not self.remote_compilation_allowed:
        # Since we're building everything locally in this case, and YB_MAKE_PARALLELISM is
        # likely specified for distributed compilation, cap it at some factor times the number
        # of CPU cores.
        parallelism_cap = multiprocessing.cpu_count() * 2
        if make_parallelism:
            make_parallelism = min(parallelism_cap, make_parallelism)
        else:
            # Bug fix: the original referenced an undefined name 'cpu_count'
            # here (NameError at runtime); the intended fallback is the
            # computed cap, as in later revisions of this function.
            make_parallelism = parallelism_cap
    if make_parallelism:
        make_cmd += ['-j', str(int(make_parallelism))]
    self.set_env_var('YB_COMPILER_TYPE', self.compiler_type)
    # Create a script allowing to easily run "make" from the build directory with the right
    # environment.
    env_script_content = ''
    for env_var_name in CONFIG_ENV_VARS:
        env_var_value = os.environ.get(env_var_name)
        if env_var_value is None:
            raise RuntimeError("Expected env var %s to be set" % env_var_name)
        env_script_content += "export %s=%s\n" % (
            env_var_name, quote_for_bash(env_var_value))
    compile_commands_files = []
    work_dirs = [self.pg_build_root]
    if self.build_type != 'compilecmds':
        work_dirs.append(os.path.join(self.pg_build_root, 'contrib'))
    for work_dir in work_dirs:
        with WorkDirContext(work_dir):
            # Create a script to run Make easily with the right environment.
            make_script_path = 'make.sh'
            with open(make_script_path, 'w') as out_f:
                out_f.write('#!/usr/bin/env bash\n'
                            '. "${BASH_SOURCE%/*}"/env.sh\n'
                            'make "$@"\n')
            with open('env.sh', 'w') as out_f:
                out_f.write(env_script_content)
            run_program(['chmod', 'u+x', make_script_path])
            # Actually run Make.
            if is_verbose_mode():
                logging.info("Running make in the %s directory", work_dir)
            run_program(
                make_cmd, stdout_stderr_prefix='make', cwd=work_dir, shell=True,
                error_ok=True).print_output_and_raise_error_if_failed()
            if self.build_type == 'compilecmds':
                logging.info(
                    "Not running make install in the %s directory since we are only "
                    "generating the compilation database", work_dir)
            else:
                run_program(
                    'make install', stdout_stderr_prefix='make_install', cwd=work_dir,
                    shell=True, error_ok=True
                ).print_output_and_raise_error_if_failed()
                logging.info("Successfully ran make in the %s directory", work_dir)
            if self.export_compile_commands:
                logging.info(
                    "Generating the compilation database in directory '%s'", work_dir)
                compile_commands_path = os.path.join(work_dir, 'compile_commands.json')
                # Skip the config.status re-check while compiledb replays make.
                self.set_env_var('YB_PG_SKIP_CONFIG_STATUS', '1')
                if not os.path.exists(compile_commands_path):
                    run_program(['compiledb', 'make', '-n'], capture_output=False)
                del os.environ['YB_PG_SKIP_CONFIG_STATUS']
                if not os.path.exists(compile_commands_path):
                    raise RuntimeError(
                        "Failed to generate compilation database at: %s" %
                        compile_commands_path)
                compile_commands_files.append(compile_commands_path)
    if self.export_compile_commands:
        self.combine_compile_commands(compile_commands_files)
def set_env_vars(self, step):
    """Set environment variables for the given build step.

    step: either 'configure' or 'make'. Adjusts compiler and linker flags
    and YB-specific variables accordingly.

    Raises:
        RuntimeError: if step is not 'configure' or 'make'.
    """
    if step not in ['configure', 'make']:
        # Bug fix: the original message contained no '{}' placeholder, so
        # .format(step) silently dropped the offending value. Include it.
        raise RuntimeError(
            "Invalid step specified for setting env vars, must be either "
            "'configure' or 'make': {}".format(step))
    os.environ['YB_BUILD_ROOT'] = self.build_root
    os.environ['YB_SRC_ROOT'] = YB_SRC_ROOT
    for var_name in ['CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'LDFLAGS_EX']:
        arg_value = getattr(self.args, var_name.lower())
        if arg_value is not None:
            os.environ[var_name] = arg_value
        else:
            # Bug fix: a plain 'del os.environ[var_name]' raises KeyError
            # when the variable is not already set; pop() is idempotent.
            os.environ.pop(var_name, None)
    additional_c_cxx_flags = [
        '-Wimplicit-function-declaration',
        '-Wno-error=unused-function',
        '-DHAVE__BUILTIN_CONSTANT_P=1',
        '-DUSE_SSE42_CRC32C=1',
    ]
    # Tell gdb to pretend that we're compiling the code in the $YB_SRC_ROOT/src/postgres
    # directory.
    build_path_variants = get_path_variants(self.pg_build_root)
    src_path_variants = get_path_variants(self.postgres_src_dir)
    additional_c_cxx_flags += [
        '-fdebug-prefix-map=%s=%s' % (build_path, source_path)
        for build_path in build_path_variants
        for source_path in src_path_variants
    ]
    if self.compiler_type == 'gcc':
        additional_c_cxx_flags.append('-Wno-error=maybe-uninitialized')
    for var_name in ['CFLAGS', 'CXXFLAGS']:
        os.environ[var_name] = filter_compiler_flags(
            os.environ.get(var_name, '') + ' ' + ' '.join(additional_c_cxx_flags),
            allow_error_on_warning=(step == 'make'))
    if step == 'make':
        self.adjust_cflags_in_makefile()
    # MAKEFLAGS inherited from an outer make invocation could interfere with
    # the PostgreSQL build.
    for env_var_name in ['MAKEFLAGS']:
        if env_var_name in os.environ:
            del os.environ[env_var_name]
    compiler_wrappers_dir = os.path.join(YB_SRC_ROOT, 'build-support', 'compiler-wrappers')
    os.environ['CC'] = os.path.join(compiler_wrappers_dir, 'cc')
    os.environ['CXX'] = os.path.join(compiler_wrappers_dir, 'c++')
    # Strip strict linker flags that the PostgreSQL build does not tolerate.
    # NOTE(review): the lines below assume LDFLAGS is set; if args.ldflags
    # was None, LDFLAGS was removed above and this raises KeyError -- verify
    # that callers always pass ldflags.
    os.environ['LDFLAGS'] = re.sub(r'-Wl,--no-undefined', ' ', os.environ['LDFLAGS'])
    os.environ['LDFLAGS'] = re.sub(
        r'-Wl,--no-allow-shlib-undefined', ' ', os.environ['LDFLAGS'])
    os.environ['LDFLAGS'] += ' -Wl,-rpath,' + os.path.join(self.build_root, 'lib')
    for ldflags_var_name in ['LDFLAGS', 'LDFLAGS_EX']:
        os.environ[ldflags_var_name] += ' -lm'
    if is_verbose_mode():
        # CPPFLAGS are C preprocessor flags, CXXFLAGS are C++ flags.
        for env_var_name in [
                'CFLAGS', 'CXXFLAGS', 'CPPFLAGS', 'LDFLAGS', 'LDFLAGS_EX', 'LIBS'
        ]:
            if env_var_name in os.environ:
                logging.info("%s: %s", env_var_name, os.environ[env_var_name])
    # PostgreSQL builds pretty fast, and we don't want to use our remote compilation over SSH
    # for it as it might have issues with parallelism.
    os.environ['YB_NO_REMOTE_BUILD'] = '1'
    os.environ['YB_BUILD_TYPE'] = self.build_type
def make_postgres(self) -> None:
    """Run 'make' and 'make install' in each PostgreSQL build directory.

    Retries 'make' on known transient errors (TRANSIENT_BUILD_ERRORS, up to
    TRANSIENT_BUILD_RETRIES extra attempts), writes make.sh/env.sh helper
    scripts, and optionally generates a compilation database.

    Raises:
        RuntimeError: on a non-transient compilation failure, on exhausting
            retries, on a missing required env var, or if the compilation
            database cannot be generated.
    """
    self.set_env_vars('make')
    # Postgresql requires MAKELEVEL to be 0 or non-set when calling its make.
    # But in case YB project is built with make, MAKELEVEL is not 0 at this point.
    make_cmd = ['make', 'MAKELEVEL=0']
    if is_macos_arm64():
        # Force native arm64 execution on Apple Silicon.
        make_cmd = ['arch', '-arm64'] + make_cmd
    make_parallelism_str: Optional[str] = os.environ.get('YB_MAKE_PARALLELISM')
    make_parallelism: Optional[int] = None
    if make_parallelism_str is not None:
        make_parallelism = int(make_parallelism_str)
    if self.build_uses_remote_compilation and not self.remote_compilation_allowed:
        # Since we're building everything locally in this case, and YB_MAKE_PARALLELISM is
        # likely specified for distributed compilation, cap it at some factor times the number
        # of CPU cores.
        parallelism_cap = multiprocessing.cpu_count() * 2
        if make_parallelism:
            make_parallelism = min(parallelism_cap, make_parallelism)
        else:
            make_parallelism = parallelism_cap
    if make_parallelism:
        make_cmd += ['-j', str(int(make_parallelism))]
    self.set_env_var('YB_COMPILER_TYPE', self.compiler_type)
    # Create a script allowing to easily run "make" from the build directory with the right
    # environment.
    env_script_content = ''
    for env_var_name in CONFIG_ENV_VARS:
        env_var_value = os.environ.get(env_var_name)
        if env_var_value is None:
            raise RuntimeError("Expected env var %s to be set" % env_var_name)
        env_script_content += "export %s=%s\n" % (
            env_var_name, quote_for_bash(env_var_value))
    pg_compile_commands_paths = []
    third_party_extensions_dir = os.path.join(self.pg_build_root, 'third-party-extensions')
    work_dirs = [
        self.pg_build_root,
        os.path.join(self.pg_build_root, 'contrib'),
        third_party_extensions_dir
    ]
    for work_dir in work_dirs:
        with WorkDirContext(work_dir):
            # Create a script to run Make easily with the right environment.
            make_script_path = 'make.sh'
            with open(make_script_path, 'w') as out_f:
                out_f.write('#!/usr/bin/env bash\n'
                            '. "${BASH_SOURCE%/*}"/env.sh\n'
                            'make "$@"\n')
            with open('env.sh', 'w') as out_f:
                out_f.write(env_script_content)
            run_program(['chmod', 'u+x', make_script_path])
            make_cmd_suffix = []
            if work_dir == third_party_extensions_dir:
                # Third-party extensions build against the installed server
                # via pg_config.
                make_cmd_suffix = ['PG_CONFIG=' + self.pg_config_path]
            # Actually run Make.
            if is_verbose_mode():
                logging.info("Running make in the %s directory", work_dir)
            complete_make_cmd = make_cmd + make_cmd_suffix
            complete_make_cmd_str = shlex_join(complete_make_cmd)
            complete_make_install_cmd = make_cmd + ['install'] + make_cmd_suffix
            attempt = 0
            # Retry loop: only failures matching TRANSIENT_BUILD_ERRORS are
            # retried; anything else raises immediately.
            while attempt <= TRANSIENT_BUILD_RETRIES:
                attempt += 1
                make_result = run_program(
                    complete_make_cmd_str,
                    stdout_stderr_prefix='make',
                    cwd=work_dir,
                    error_ok=True,
                    shell=True  # TODO: get rid of shell=True.
                )
                if make_result.failure():
                    transient_err = False
                    stderr_lines = make_result.get_stderr().split('\n')
                    for line in stderr_lines:
                        if any(transient_error_pattern in line
                               for transient_error_pattern in TRANSIENT_BUILD_ERRORS):
                            transient_err = True
                            logging.info(f'Transient error: {line}')
                            break
                    if transient_err:
                        logging.info(
                            f"Transient error during build attempt {attempt}. "
                            f"Re-trying make command: {complete_make_cmd_str}."
                        )
                    else:
                        make_result.print_output_to_stdout()
                        raise RuntimeError("PostgreSQL compilation failed")
                else:
                    logging.info(
                        "Successfully ran 'make' in the %s directory", work_dir)
                    break  # No error, break out of retry loop
            else:
                # while/else: reached only when the loop exhausts every
                # retry without breaking out.
                raise RuntimeError(
                    f"Maximum build attempts reached ({TRANSIENT_BUILD_RETRIES} attempts)."
                )
            if self.build_type != 'compilecmds' or work_dir == self.pg_build_root:
                run_program(
                    ' '.join(shlex.quote(arg) for arg in complete_make_install_cmd),
                    stdout_stderr_prefix='make_install',
                    cwd=work_dir,
                    error_ok=True,
                    shell=True  # TODO: get rid of shell=True.
                ).print_output_and_raise_error_if_failed()
                logging.info(
                    "Successfully ran 'make install' in the %s directory", work_dir)
            else:
                logging.info(
                    "Not running 'make install' in the %s directory since we are only "
                    "generating the compilation database", work_dir)
            if self.export_compile_commands and not self.skip_pg_compile_commands:
                logging.info(
                    "Generating the compilation database in directory '%s'", work_dir)
                compile_commands_path = os.path.join(work_dir, 'compile_commands.json')
                # Skip the config.status re-check while compiledb replays make.
                self.set_env_var('YB_PG_SKIP_CONFIG_STATUS', '1')
                if not os.path.exists(compile_commands_path):
                    run_program(['compiledb', 'make', '-n'] + make_cmd_suffix,
                                capture_output=False)
                del os.environ['YB_PG_SKIP_CONFIG_STATUS']
                if not os.path.exists(compile_commands_path):
                    raise RuntimeError(
                        "Failed to generate compilation database at: %s" %
                        compile_commands_path)
                pg_compile_commands_paths.append(compile_commands_path)
    if self.export_compile_commands:
        self.write_compile_commands_files(pg_compile_commands_paths)
def configure_postgres(self) -> None:
    """Run PostgreSQL's ./configure with YB-specific options.

    Retries once when the failure is caused by a stale config.cache; on any
    other failure dumps config.log to stderr and raises RuntimeError.
    """
    if is_verbose_mode():
        logging.info("Running configure in the postgres build directory")
    # Don't enable -Werror when running configure -- that can affect the resulting
    # configuration.
    configure_cmd_line = [
        './configure',
        '--prefix', self.pg_prefix,
        '--with-extra-version=-YB-' + self.get_yb_version(),
        '--enable-depend',
        '--with-icu',
        '--with-ldap',
        '--with-openssl',
        '--with-gssapi',
        # Options are ossp (original/old implementation), bsd (BSD) and e2fs
        # (libuuid-based for Unix/Mac).
        '--with-uuid=e2fs',
        '--with-libedit-preferred',
        '--with-includes=' + self.openssl_include_dir,
        '--with-libraries=' + self.openssl_lib_dir,
        # We're enabling debug symbols for all types of builds.
        '--enable-debug'
    ]
    if is_macos_arm64():
        # Run the configure script under the Homebrew bash on Apple Silicon.
        configure_cmd_line.insert(0, '/opt/homebrew/bin/bash')
    if not get_bool_env_var('YB_NO_PG_CONFIG_CACHE'):
        configure_cmd_line.append('--config-cache')
    # We get readline-related errors in ASAN/TSAN, so let's disable readline there.
    if self.build_type in ['asan', 'tsan']:
        # TODO: do we still need this limitation?
        configure_cmd_line += ['--without-readline']
    if self.build_type not in ['release', 'prof_gen', 'prof_use']:
        configure_cmd_line += ['--enable-cassert']
    # Unset YB_SHOW_COMPILER_COMMAND_LINE when configuring postgres to avoid unintended side
    # effects from additional compiler output.
    with EnvVarContext(YB_SHOW_COMPILER_COMMAND_LINE=None):
        configure_result = run_program(configure_cmd_line, error_ok=True)
    if configure_result.failure():
        rerun_configure = False
        # A stale config.cache is the one recoverable failure: delete the
        # cache and run configure once more.
        for line in configure_result.stderr.splitlines():
            if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                logging.info(
                    "Configure failed because of stale config.cache, re-running."
                )
                run_program('rm -f config.cache')
                rerun_configure = True
                break
        if not rerun_configure:
            logging.error("Standard error from configure:\n" + configure_result.stderr)
            # Dump config.log to help diagnose the failure.
            config_log_path = os.path.join(self.pg_build_root, "config.log")
            if os.path.exists(config_log_path):
                with open(config_log_path) as config_log_file:
                    config_log_str = config_log_file.read()
                logging.info(f"Contents of {config_log_path}:")
                sys.stderr.write(config_log_str + "\n")
            else:
                logging.warning(f"File not found: {config_log_path}")
            raise RuntimeError("configure failed")
        # NOTE(review): this re-run passes the command list with shell=True;
        # presumably run_program handles that combination -- verify.
        configure_result = run_program(
            configure_cmd_line, shell=True, stdout_stderr_prefix='configure',
            error_ok=True)
    if is_verbose_mode() and configure_result.success():
        configure_result.print_output_to_stdout()
    configure_result.print_output_and_raise_error_if_failed()
    logging.info(
        "Successfully ran configure in the postgres build directory")
def set_env_vars(self, step):
    """Set environment variables for the given PostgreSQL build step.

    step: either 'configure' or 'make'. Adjusts compiler/linker flags and
    YB-specific variables; warnings are only promoted to errors during
    'make' so that 'configure' detection results are not affected.

    Raises:
        RuntimeError: if step is not 'configure' or 'make'.
    """
    if step not in ['configure', 'make']:
        # Bug fix: the original message contained no '{}' placeholder, so
        # .format(step) silently dropped the offending value. Include it.
        raise RuntimeError(
            "Invalid step specified for setting env vars, must be either "
            "'configure' or 'make': {}".format(step))
    self.set_env_var('YB_PG_BUILD_STEP', step)
    self.set_env_var('YB_BUILD_ROOT', self.build_root)
    self.set_env_var('YB_SRC_ROOT', YB_SRC_ROOT)
    # Propagate flags given as command-line arguments into the environment.
    for var_name in ['CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'LDFLAGS_EX']:
        arg_value = getattr(self.args, var_name.lower())
        self.set_env_var(var_name, arg_value)
    additional_c_cxx_flags = [
        '-Wimplicit-function-declaration',
        '-Wno-error=unused-function',
        '-DHAVE__BUILTIN_CONSTANT_P=1',
        '-DUSE_SSE42_CRC32C=1',
        '-std=c11',
        '-Werror=implicit-function-declaration',
        '-Werror=int-conversion',
    ]
    if self.compiler_type == 'clang':
        additional_c_cxx_flags += ['-Wno-error=builtin-requires-header']
    if step == 'make':
        # Warnings-as-errors only during 'make' (see docstring).
        additional_c_cxx_flags += [
            '-Wall',
            '-Werror',
            '-Wno-error=unused-function'
        ]
        if self.build_type == 'release':
            if self.compiler_type == 'clang':
                additional_c_cxx_flags += [
                    '-Wno-error=array-bounds',
                    '-Wno-error=gnu-designator'
                ]
            if self.compiler_type == 'gcc':
                additional_c_cxx_flags += ['-Wno-error=strict-overflow']
        if self.build_type == 'asan':
            additional_c_cxx_flags += [
                '-fsanitize-recover=signed-integer-overflow',
                '-fsanitize-recover=shift-base',
                '-fsanitize-recover=shift-exponent'
            ]
    # Tell gdb to pretend that we're compiling the code in the $YB_SRC_ROOT/src/postgres
    # directory.
    build_path_variants = get_path_variants(self.pg_build_root)
    src_path_variants = get_path_variants(self.postgres_src_dir)
    additional_c_cxx_flags += [
        '-fdebug-prefix-map=%s=%s' % (build_path, source_path)
        for build_path in build_path_variants
        for source_path in src_path_variants
    ]
    if self.compiler_type == 'gcc':
        additional_c_cxx_flags.append('-Wno-error=maybe-uninitialized')
    for var_name in ['CFLAGS', 'CXXFLAGS']:
        os.environ[var_name] = filter_compiler_flags(
            os.environ.get(var_name, '') + ' ' + ' '.join(additional_c_cxx_flags),
            step,
            language='c' if var_name == 'CFLAGS' else 'c++')
    if step == 'make':
        self.adjust_cflags_in_makefile()
    # MAKEFLAGS inherited from an outer make invocation could interfere with
    # the PostgreSQL build.
    for env_var_name in ['MAKEFLAGS']:
        if env_var_name in os.environ:
            del os.environ[env_var_name]
    compiler_wrappers_dir = os.path.join(YB_SRC_ROOT, 'build-support', 'compiler-wrappers')
    self.set_env_var('CC', os.path.join(compiler_wrappers_dir, 'cc'))
    self.set_env_var('CXX', os.path.join(compiler_wrappers_dir, 'c++'))
    # Strip strict linker flags that the PostgreSQL build does not tolerate.
    self.set_env_var(
        'LDFLAGS',
        re.sub(r'-Wl,--no-undefined', ' ', os.environ['LDFLAGS']))
    self.set_env_var(
        'LDFLAGS',
        re.sub(r'-Wl,--no-allow-shlib-undefined', ' ', os.environ['LDFLAGS']))
    self.append_to_env_var(
        'LDFLAGS',
        '-Wl,-rpath,' + os.path.join(self.build_root, 'lib'))
    for ldflags_var_name in ['LDFLAGS', 'LDFLAGS_EX']:
        self.append_to_env_var(ldflags_var_name, '-lm')
    if is_verbose_mode():
        # CPPFLAGS are C preprocessor flags, CXXFLAGS are C++ flags.
        for env_var_name in [
                'CFLAGS', 'CXXFLAGS', 'CPPFLAGS', 'LDFLAGS', 'LDFLAGS_EX', 'LIBS'
        ]:
            if env_var_name in os.environ:
                logging.info("%s: %s", env_var_name, os.environ[env_var_name])
    # PostgreSQL builds pretty fast, and we don't want to use our remote compilation over SSH
    # for it as it might have issues with parallelism.
    self.remote_compilation_allowed = ALLOW_REMOTE_COMPILATION and step == 'make'
    self.set_env_var(
        'YB_REMOTE_COMPILATION',
        '1' if self.remote_compilation_allowed and self.build_uses_remote_compilation
        else '0')
    self.set_env_var('YB_BUILD_TYPE', self.build_type)
    # Do not try to make rpaths relative during the configure step, as that may slow it down.
    self.set_env_var('YB_DISABLE_RELATIVE_RPATH', '1' if step == 'configure' else '0')