Example #1
    def test_stdout_and_stderr_together(self):
        self.assertRegexpMatches(
            run_program('echo foo').get_stdout_and_stderr_together().strip(),
            r"""
Standard output from external program {{ echo foo }} running in '.*':
foo
\(end of standard output\)
            """.strip())
        self.assertRegexpMatches(
            run_program(
                'echo bar >&2').get_stdout_and_stderr_together().strip(), r"""
Standard error from external program {{ echo bar >&2 }} running in '.*':
bar
\(end of standard error\)
            """.strip())
        self.assertRegexpMatches(
            run_program('echo foo; echo bar >&2').
            get_stdout_and_stderr_together().strip(), r"""
Standard output from external program {{ echo foo; echo bar >&2 }} running in '.*':
foo
\(end of standard output\)

Standard error from external program {{ echo foo; echo bar >&2 }} running in '.*':
bar
\(end of standard error\)
            """.strip())
Example #2
    def test_redirect_output_to_files(self):
        stdout_path = get_tmp_file_path(prefix='test_redirect_to_file',
                                        suffix='.stdout')
        stderr_path = get_tmp_file_path(prefix='test_redirect_to_file',
                                        suffix='.stderr')
        result = run_program(
            'echo Foo; echo Foo 2; echo Bar >&2; echo Bar 2 >&2',
            stdout_path=stdout_path,
            stderr_path=stderr_path)
        self.assertEqual(0, result.returncode)
        self.assertIsNone(result.stdout)
        self.assertIsNone(result.stderr)
        self.assertEqual("Foo\nFoo 2\n", read_file(stdout_path))
        self.assertEqual("Bar\nBar 2\n", read_file(stderr_path))

        out_err_prefix = get_tmp_file_path(
            prefix='test_redirect_to_file_with_prefix')
        result = run_program(
            "echo 'This is stdout with one slash: \\. And now two slashes: \\\\.'; "
            "echo 'And now 3 slashes: \\\\\\. And four: \\\\\\\\.' >&2",
            stdout_stderr_prefix=out_err_prefix)
        self.assertEqual(0, result.returncode)
        self.assertIsNone(result.stdout)
        self.assertIsNone(result.stderr)
        self.assertEqual(
            "This is stdout with one slash: \\. And now two slashes: \\\\.\n",
            read_file(out_err_prefix + '.out'))
        self.assertEqual("And now 3 slashes: \\\\\\. And four: \\\\\\\\.\n",
                         read_file(out_err_prefix + '.err'))
Example #3
 def sync_postgres_source(self):
     logging.info("Syncing postgres source code")
     # Remove source code files from the build directory that have been removed from the
     # source directory.
     # TODO: extend this to the complete list of source file types.
     run_program([
         'rsync', '-avz',
         '--include', '*.c',
         '--include', '*.h',
         '--include', 'Makefile',
         '--include', '*.am',
         '--include', '*.in',
         '--include', '*.mk',
         '--exclude', '*',
         '--delete',
         self.postgres_src_dir + '/', self.pg_build_root],
         capture_output=False)
     run_program([
         'rsync',
         '-az',
         '--exclude', '*.sw?',
         '--exclude', '*.bak',
         '--exclude', '*.orig',
         '--exclude', '*.rej',
         self.postgres_src_dir + '/', self.pg_build_root],
         capture_output=False)
     if is_verbose_mode():
         logging.info("Successfully synced postgres source code")
Example #5
 def clean_postgres(self):
     logging.info(
         "Removing the postgres build and installation directories")
     for dir_to_delete in [self.pg_build_root, self.pg_prefix]:
         logging.info("Deleting the directory '%s'", dir_to_delete)
         # rm -rf is much faster than Python's rmtree.
         run_program(['rm', '-rf', dir_to_delete], capture_output=False)
Example #6
    def configure_postgres(self):
        if is_verbose_mode():
            logging.info("Running configure in the postgres build directory")
        # Don't enable -Werror when running configure -- that can affect the resulting
        # configuration.
        configure_cmd_line = [
            './configure',
            '--prefix',
            self.pg_prefix,
            '--with-extra-version=-YB-' + self.get_yb_version(),
            '--enable-depend',
            '--with-openssl',
            '--with-libedit-preferred',
            '--with-includes=' + self.openssl_include_dir,
            '--with-libraries=' + self.openssl_lib_dir,
            # We're enabling debug symbols for all types of builds.
            '--enable-debug'
        ]
        if not get_bool_env_var('YB_NO_PG_CONFIG_CACHE'):
            configure_cmd_line.append('--config-cache')

        # We get readline-related errors in ASAN/TSAN, so let's disable readline there.
        if self.build_type in ['asan', 'tsan']:
            # TODO: do we still need this limitation?
            configure_cmd_line += ['--without-readline']

        if self.build_type != 'release':
            configure_cmd_line += ['--enable-cassert']
        configure_result = run_program(configure_cmd_line, error_ok=True)
        if configure_result.failure():
            rerun_configure = False
            for line in configure_result.stderr.splitlines():
                if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                    logging.info(
                        "Configure failed because of stale config.cache, re-running."
                    )
                    run_program('rm -f config.cache')
                    rerun_configure = True
                    break

            if not rerun_configure:
                logging.error("Standard error from configure:\n" +
                              configure_result.stderr)
                raise RuntimeError("configure failed")

            configure_result = run_program(configure_cmd_line,
                                           shell=True,
                                           stdout_stderr_prefix='configure',
                                           error_ok=True)

        if is_verbose_mode() and configure_result.success():
            configure_result.print_output_to_stdout()

        configure_result.print_output_and_raise_error_if_failed()

        logging.info(
            "Successfully ran configure in the postgres build directory")
Example #7
    def make_postgres(self):
        self.set_env_vars('make')
        make_cmd = ['make']

        make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
        if make_parallelism:
            make_parallelism = int(make_parallelism)
        if self.build_uses_remote_compilation and not self.remote_compilation_allowed:
            # Since we're building everything locally in this case, and YB_MAKE_PARALLELISM is
            # likely specified for distributed compilation, cap it at some factor times the number
            # of CPU cores.
            parallelism_cap = multiprocessing.cpu_count() * 2
            if make_parallelism:
                make_parallelism = min(parallelism_cap, make_parallelism)
            else:
                make_parallelism = parallelism_cap

        if make_parallelism:
            make_cmd += ['-j', str(int(make_parallelism))]

        os.environ['YB_COMPILER_TYPE'] = self.compiler_type

        # Create a script allowing to easily run "make" from the build directory with the right
        # environment.
        make_script_content = "#!/usr/bin/env bash\n"
        for env_var_name in [
                'YB_SRC_ROOT', 'YB_BUILD_ROOT', 'YB_BUILD_TYPE', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS',
                'PATH']:
            env_var_value = os.environ[env_var_name]
            if env_var_value is None:
                raise RuntimeError("Expected env var %s to be set" % env_var_name)
            make_script_content += "export %s=%s\n" % (env_var_name, quote_for_bash(env_var_value))
        make_script_content += 'make "$@"\n'

        for work_dir in [self.pg_build_root, os.path.join(self.pg_build_root, 'contrib')]:
            with WorkDirContext(work_dir):
                # Create a script to run Make easily with the right environment.
                make_script_path = 'make.sh'
                with open(make_script_path, 'w') as out_f:
                    out_f.write(make_script_content)
                run_program(['chmod', 'u+x', make_script_path])

                # Actually run Make.
                if is_verbose_mode():
                    logging.info("Running make in the %s directory", work_dir)
                make_result = run_program(make_cmd)
                write_program_output_to_file('make', make_result, work_dir)
                make_install_result = run_program(['make', 'install'])
                write_program_output_to_file('make_install', make_install_result, work_dir)
                logging.info("Successfully ran make in the %s directory", work_dir)
Example #8
    def configure_postgres(self):
        if is_verbose_mode():
            logging.info("Running configure in the postgres build directory")
        # Don't enable -Werror when running configure -- that can affect the resulting
        # configuration.
        self.set_env_vars('configure')
        configure_cmd_line = [
            './configure',
            '--prefix',
            self.pg_prefix,
            '--enable-depend',
            # We're enabling debug symbols for all types of builds.
            '--enable-debug'
        ]
        if not get_bool_env_var('YB_NO_PG_CONFIG_CACHE'):
            configure_cmd_line.append('--config-cache')

        # We get readline-related errors in ASAN/TSAN, so let's disable readline there.
        if self.build_type in ['asan', 'tsan']:
            configure_cmd_line += ['--without-readline']

        if self.build_type != 'release':
            configure_cmd_line += ['--enable-cassert']
        configure_result = run_program(configure_cmd_line, error_ok=True)
        if configure_result.failure():
            rerun_configure = False
            for line in configure_result.stderr.splitlines():
                if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                    logging.info(
                        "Configure failed because of stale config.cache, re-running."
                    )
                    run_program('rm -f config.cache')
                    rerun_configure = True
                    break

            if not rerun_configure:
                logging.error("Standard error from configure:\n" +
                              configure_result.stderr)
                raise RuntimeError("configure failed")

            configure_result = run_program(configure_cmd_line)

        if is_verbose_mode():
            configure_result.print_output_to_stdout()
        write_program_output_to_file('configure', configure_result,
                                     self.pg_build_root)

        logging.info(
            "Successfully ran configure in the postgres build directory")
Example #9
 def test_no_output_capture(self):
     result = run_program(
         'echo "This is expected to show up in the test output"',
         capture_output=False)
     self.assertIsNone(result.stdout)
     self.assertIsNone(result.stderr)
     self.assertFalse(result.output_captured)
Example #10
    def test_work_dir_context(self):
        old_work_dir = os.getcwd()
        for d in ['/tmp', os.path.expanduser('~')]:
            with WorkDirContext(d):
                self.assertEquals(d, os.getcwd())
                self.assertEquals(d, run_program('pwd').stdout.strip())

        self.assertEquals(old_work_dir, os.getcwd())
Example #11
    def make_postgres(self):
        self.set_env_vars('make')
        make_cmd = ['make']

        make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
        if make_parallelism:
            make_cmd += ['-j', str(int(make_parallelism))]
        os.environ['YB_COMPILER_TYPE'] = self.compiler_type

        # Create a script allowing to easily run "make" from the build directory with the right
        # environment.
        make_script_content = "#!/usr/bin/env bash\n"
        for env_var_name in [
                'YB_SRC_ROOT', 'YB_BUILD_ROOT', 'YB_BUILD_TYPE', 'CFLAGS',
                'CXXFLAGS', 'LDFLAGS', 'PATH'
        ]:
            env_var_value = os.environ[env_var_name]
            if env_var_value is None:
                raise RuntimeError("Expected env var %s to be set" %
                                   env_var_name)
            make_script_content += "export %s=%s\n" % (
                env_var_name, quote_for_bash(env_var_value))
        make_script_content += 'make "$@"\n'

        for work_dir in [
                self.pg_build_root,
                os.path.join(self.pg_build_root, 'contrib')
        ]:
            with WorkDirContext(work_dir):
                # Create a script to run Make easily with the right environment.
                make_script_path = 'make.sh'
                with open(make_script_path, 'w') as out_f:
                    out_f.write(make_script_content)
                run_program(['chmod', 'u+x', make_script_path])

                # Actually run Make.
                if is_verbose_mode():
                    logging.info("Running make in the %s directory", work_dir)
                make_result = run_program(make_cmd)
                write_program_output_to_file('make', make_result, work_dir)
                make_install_result = run_program(['make', 'install'])
                write_program_output_to_file('make_install',
                                             make_install_result, work_dir)
                logging.info("Successfully ran make in the %s directory",
                             work_dir)
Example #12
    def test_error_msg(self):
        # No error message if exit code is 0.
        self.assertIsNone(run_program('true').error_msg)

        result = run_program(
            'echo " This is stdout! "; echo " This is stderr! " >&2; exit 1',
            error_ok=True)
        self.assertRegexpMatches(
            result.error_msg.strip(), r"""
Non-zero exit code 1 from external program {{ echo " This is stdout! "; echo " This is stderr! " >&2; exit 1 }} running in '.*'.
Standard output from external program {{ echo " This is stdout! "; echo " This is stderr! " >&2; exit 1 }} running in '.*':
 This is stdout!
\(end of standard output\)

Standard error from external program {{ echo " This is stdout! "; echo " This is stderr! " >&2; exit 1 }} running in '.*':
 This is stderr!
\(end of standard error\)
""".strip())
Example #13
def main():
    if os.environ.get('YB_SKIP_INITIAL_SYS_CATALOG_SNAPSHOT', '0') == '1':
        logging.info('YB_SKIP_INITIAL_SYS_CATALOG_SNAPSHOT is set, skipping initdb')
        return

    build_root = os.environ['YB_BUILD_ROOT']
    tool_name = 'create_initial_sys_catalog_snapshot'
    tool_path = os.path.join(build_root, 'tests-pgwrapper', tool_name)
    snapshot_dest_path = os.path.join(build_root, 'share', 'initial_sys_catalog_snapshot')

    file_to_check = os.path.join(snapshot_dest_path, 'exported_tablet_metadata_changes')
    if (os.path.exists(file_to_check) and
            os.environ.get('YB_RECREATE_INITIAL_SYS_CATALOG_SNAPSHOT', '') != '1'):
        logging.info(
            "Initial sys catalog snapshot already exists, not re-creating: %s",
            snapshot_dest_path)
        return
    if os.path.exists(snapshot_dest_path):
        logging.info("Removing initial sys catalog snapshot data at: %s", snapshot_dest_path)
        shutil.rmtree(snapshot_dest_path)

    mkdir_p(os.path.dirname(snapshot_dest_path))

    start_time_sec = time.time()
    logging.info("Starting creating initial system catalog snapshot data")
    logging.info("Logging to: %s", os.path.join(build_root, tool_name + '.err'))
    os.environ['YB_EXTRA_GTEST_FLAGS'] = (
        '--initial_sys_catalog_snapshot_dest_path=' + snapshot_dest_path
    )
    os.environ['YB_CTEST_VERBOSE'] = '1'
    with WorkDirContext(build_root):
        run_program(
            [
                os.path.join(YB_SRC_ROOT, 'build-support', 'run-test.sh'),
                tool_path,
            ],
            stdout_stderr_prefix=tool_name,
            shell=True
        )
    elapsed_time_sec = time.time() - start_time_sec
    logging.info(
        "Initial system catalog snapshot data creation took %1.f sec. Wrote data to: %s",
        elapsed_time_sec, snapshot_dest_path)
Example #14
    def configure_postgres(self):
        if is_verbose_mode():
            logging.info("Running configure in the postgres build directory")
        # Don't enable -Werror when running configure -- that can affect the resulting
        # configuration.
        self.set_env_vars('configure')
        configure_cmd_line = [
                './configure',
                '--prefix', self.pg_prefix,
                '--enable-depend',
                # We're enabling debug symbols for all types of builds.
                '--enable-debug']
        if not get_bool_env_var('YB_NO_PG_CONFIG_CACHE'):
            configure_cmd_line.append('--config-cache')

        # We get readline-related errors in ASAN/TSAN, so let's disable readline there.
        if self.build_type in ['asan', 'tsan']:
            configure_cmd_line += ['--without-readline']

        if self.build_type != 'release':
            configure_cmd_line += ['--enable-cassert']
        configure_result = run_program(configure_cmd_line, error_ok=True)
        if configure_result.failure():
            rerun_configure = False
            for line in configure_result.stderr.splitlines():
                if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                    logging.info("Configure failed because of stale config.cache, re-running.")
                    run_program('rm -f config.cache')
                    rerun_configure = True
                    break

            if not rerun_configure:
                logging.error("Standard error from configure:\n" + configure_result.stderr)
                raise RuntimeError("configure failed")

            configure_result = run_program(configure_cmd_line)

        if is_verbose_mode():
            configure_result.print_output_to_stdout()
        write_program_output_to_file('configure', configure_result, self.pg_build_root)

        logging.info("Successfully ran configure in the postgres build directory")
Example #15
    def configure_postgres(self):
        logging.info("Running configure in the postgres build directory")
        # Don't enable -Werror when running configure -- that can affect the resulting
        # configuration.
        self.set_env_vars('configure')
        configure_cmd_line = [
            './configure',
            '--prefix',
            self.pg_prefix,
            '--config-cache',
            '--enable-depend',
            # We're enabling debug symbols for all types of builds.
            '--enable-debug'
        ]
        if self.build_type != 'release':
            configure_cmd_line += ['--enable-cassert']
        configure_result = run_program(configure_cmd_line, error_ok=True)
        if configure_result.failure():
            rerun_configure = False
            for line in configure_result.stderr.splitlines():
                if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                    logging.info(
                        "Configure failed because of stale config.cache, re-running."
                    )
                    run_program('rm -f config.cache', capture_output=True)
                    rerun_configure = True
                    break

            if not rerun_configure:
                logging.error("Standard error from configure:\n" +
                              configure_result.stderr)
                raise RuntimeError("configure failed")

            configure_result = run_program(configure_cmd_line)

        configure_result.print_output_to_stdout()

        logging.info(
            "Successfully ran configure in the postgres build directory")
Example #16
    def make_postgres(self):
        self.set_env_vars('make')
        make_cmd = ['make']

        make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
        if make_parallelism:
            make_cmd += ['-j', str(int(make_parallelism))]

        # Create a script allowing to easily run "make" from the build directory with the right
        # environment.
        make_script_content = "#!/usr/bin/env bash\n"
        for env_var_name in [
                'YB_SRC_ROOT', 'YB_BUILD_ROOT', 'YB_BUILD_TYPE', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS',
                'PATH']:
            env_var_value = os.environ[env_var_name]
            if env_var_value is None:
                raise RuntimeError("Expected env var %s to be set" % env_var_name)
            make_script_content += "export %s=%s\n" % (env_var_name, quote_for_bash(env_var_value))
        make_script_content += 'make "$@"\n'

        for work_dir in [self.pg_build_root, os.path.join(self.pg_build_root, 'contrib')]:
            with WorkDirContext(work_dir):
                # Create a script to run Make easily with the right environment.
                make_script_path = 'make.sh'
                with open(make_script_path, 'w') as out_f:
                    out_f.write(make_script_content)
                run_program(['chmod', 'u+x', make_script_path])

                # Actually run Make.
                if is_verbose_mode():
                    logging.info("Running make in the %s directory", work_dir)
                make_result = run_program(make_cmd)
                write_program_output_to_file('make', make_result, work_dir)
                make_install_result = run_program(['make', 'install'])
                write_program_output_to_file('make_install', make_install_result, work_dir)
                logging.info("Successfully ran make in the %s directory", work_dir)
Example #17
def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('--program_name', type=str, help="Program name")
    parser.add_argument('--output_file_path',
                        type=str,
                        help="Path to output file")
    args = parser.parse_args()

    start_time_sec = time.time()
    build_root = os.environ['YB_BUILD_ROOT']

    with WorkDirContext(build_root):
        content = run_program([
            os.path.join(build_root, 'bin', args.program_name),
            "--dump_flags_xml",
        ],
                              shell=True)

        with open(args.output_file_path, 'w+', encoding='utf-8') as f:
            f.write(content.stdout)

    elapsed_time_sec = time.time() - start_time_sec
    logging.info("Generated flags_metadata for %s in %.1f sec",
                 args.program_name, elapsed_time_sec)
Example #18
    def make_postgres(self) -> None:
        self.set_env_vars('make')
        # Postgresql requires MAKELEVEL to be 0 or non-set when calling its make.
        # But in case YB project is built with make, MAKELEVEL is not 0 at this point.
        make_cmd = ['make', 'MAKELEVEL=0']
        if is_macos_arm64():
            make_cmd = ['arch', '-arm64'] + make_cmd

        make_parallelism_str: Optional[str] = os.environ.get(
            'YB_MAKE_PARALLELISM')
        make_parallelism: Optional[int] = None
        if make_parallelism_str is not None:
            make_parallelism = int(make_parallelism_str)
        if self.build_uses_remote_compilation and not self.remote_compilation_allowed:
            # Since we're building everything locally in this case, and YB_MAKE_PARALLELISM is
            # likely specified for distributed compilation, cap it at some factor times the number
            # of CPU cores.
            parallelism_cap = multiprocessing.cpu_count() * 2
            if make_parallelism:
                make_parallelism = min(parallelism_cap, make_parallelism)
            else:
                make_parallelism = parallelism_cap

        if make_parallelism:
            make_cmd += ['-j', str(int(make_parallelism))]

        self.set_env_var('YB_COMPILER_TYPE', self.compiler_type)

        # Create a script allowing to easily run "make" from the build directory with the right
        # environment.
        env_script_content = ''
        for env_var_name in CONFIG_ENV_VARS:
            env_var_value = os.environ.get(env_var_name)
            if env_var_value is None:
                raise RuntimeError("Expected env var %s to be set" %
                                   env_var_name)
            env_script_content += "export %s=%s\n" % (
                env_var_name, quote_for_bash(env_var_value))

        pg_compile_commands_paths = []

        third_party_extensions_dir = os.path.join(self.pg_build_root,
                                                  'third-party-extensions')
        work_dirs = [
            self.pg_build_root,
            os.path.join(self.pg_build_root, 'contrib'),
            third_party_extensions_dir
        ]

        for work_dir in work_dirs:
            with WorkDirContext(work_dir):
                # Create a script to run Make easily with the right environment.
                make_script_path = 'make.sh'
                with open(make_script_path, 'w') as out_f:
                    out_f.write('#!/usr/bin/env bash\n'
                                '. "${BASH_SOURCE%/*}"/env.sh\n'
                                'make "$@"\n')
                with open('env.sh', 'w') as out_f:
                    out_f.write(env_script_content)

                run_program(['chmod', 'u+x', make_script_path])

                make_cmd_suffix = []
                if work_dir == third_party_extensions_dir:
                    make_cmd_suffix = ['PG_CONFIG=' + self.pg_config_path]

                # Actually run Make.
                if is_verbose_mode():
                    logging.info("Running make in the %s directory", work_dir)

                complete_make_cmd = make_cmd + make_cmd_suffix
                complete_make_cmd_str = shlex_join(complete_make_cmd)
                complete_make_install_cmd = make_cmd + ['install'
                                                        ] + make_cmd_suffix
                attempt = 0
                while attempt <= TRANSIENT_BUILD_RETRIES:
                    attempt += 1
                    make_result = run_program(
                        complete_make_cmd_str,
                        stdout_stderr_prefix='make',
                        cwd=work_dir,
                        error_ok=True,
                        shell=True  # TODO: get rid of shell=True.
                    )
                    if make_result.failure():
                        transient_err = False
                        stderr_lines = make_result.get_stderr().split('\n')
                        for line in stderr_lines:
                            if any(transient_error_pattern in line
                                   for transient_error_pattern in
                                   TRANSIENT_BUILD_ERRORS):
                                transient_err = True
                                logging.info(f'Transient error: {line}')
                                break
                        if transient_err:
                            logging.info(
                                f"Transient error during build attempt {attempt}. "
                                f"Re-trying make command: {complete_make_cmd_str}."
                            )
                        else:
                            make_result.print_output_to_stdout()
                            raise RuntimeError("PostgreSQL compilation failed")
                    else:
                        logging.info(
                            "Successfully ran 'make' in the %s directory",
                            work_dir)
                        break  # No error, break out of retry loop
                else:
                    raise RuntimeError(
                        f"Maximum build attempts reached ({TRANSIENT_BUILD_RETRIES} attempts)."
                    )

                if self.build_type != 'compilecmds' or work_dir == self.pg_build_root:
                    run_program(
                        ' '.join(
                            shlex.quote(arg)
                            for arg in complete_make_install_cmd),
                        stdout_stderr_prefix='make_install',
                        cwd=work_dir,
                        error_ok=True,
                        shell=True  # TODO: get rid of shell=True.
                    ).print_output_and_raise_error_if_failed()
                    logging.info(
                        "Successfully ran 'make install' in the %s directory",
                        work_dir)
                else:
                    logging.info(
                        "Not running 'make install' in the %s directory since we are only "
                        "generating the compilation database", work_dir)

                if self.export_compile_commands:
                    logging.info(
                        "Generating the compilation database in directory '%s'",
                        work_dir)

                    compile_commands_path = os.path.join(
                        work_dir, 'compile_commands.json')
                    self.set_env_var('YB_PG_SKIP_CONFIG_STATUS', '1')
                    if not os.path.exists(compile_commands_path):
                        run_program(['compiledb', 'make', '-n'] +
                                    make_cmd_suffix,
                                    capture_output=False)
                    del os.environ['YB_PG_SKIP_CONFIG_STATUS']

                    if not os.path.exists(compile_commands_path):
                        raise RuntimeError(
                            "Failed to generate compilation database at: %s" %
                            compile_commands_path)
                    pg_compile_commands_paths.append(compile_commands_path)

        if self.export_compile_commands:
            self.write_compile_commands_files(pg_compile_commands_paths)
Example #19
    def configure_postgres(self) -> None:
        if is_verbose_mode():
            logging.info("Running configure in the postgres build directory")
        # Don't enable -Werror when running configure -- that can affect the resulting
        # configuration.
        configure_cmd_line = [
            './configure',
            '--prefix',
            self.pg_prefix,
            '--with-extra-version=-YB-' + self.get_yb_version(),
            '--enable-depend',
            '--with-icu',
            '--with-ldap',
            '--with-openssl',
            '--with-gssapi',
            # Options are ossp (original/old implementation), bsd (BSD) and e2fs
            # (libuuid-based for Unix/Mac).
            '--with-uuid=e2fs',
            '--with-libedit-preferred',
            '--with-includes=' + self.openssl_include_dir,
            '--with-libraries=' + self.openssl_lib_dir,
            # We're enabling debug symbols for all types of builds.
            '--enable-debug'
        ]
        if is_macos_arm64():
            configure_cmd_line.insert(0, '/opt/homebrew/bin/bash')

        if not get_bool_env_var('YB_NO_PG_CONFIG_CACHE'):
            configure_cmd_line.append('--config-cache')

        # We get readline-related errors in ASAN/TSAN, so let's disable readline there.
        if self.build_type in ['asan', 'tsan']:
            # TODO: do we still need this limitation?
            configure_cmd_line += ['--without-readline']

        if self.build_type != 'release':
            configure_cmd_line += ['--enable-cassert']
        # Unset YB_SHOW_COMPILER_COMMAND_LINE when configuring postgres to avoid unintended side
        # effects from additional compiler output.
        with EnvVarContext(YB_SHOW_COMPILER_COMMAND_LINE=None):
            configure_result = run_program(configure_cmd_line, error_ok=True)
        if configure_result.failure():
            rerun_configure = False
            for line in configure_result.stderr.splitlines():
                if REMOVE_CONFIG_CACHE_MSG_RE.search(line.strip()):
                    logging.info(
                        "Configure failed because of stale config.cache, re-running."
                    )
                    run_program('rm -f config.cache')
                    rerun_configure = True
                    break

            if not rerun_configure:
                logging.error("Standard error from configure:\n" +
                              configure_result.stderr)
                config_log_path = os.path.join(self.pg_build_root,
                                               "config.log")
                if os.path.exists(config_log_path):
                    with open(config_log_path) as config_log_file:
                        config_log_str = config_log_file.read()
                    logging.info(f"Contents of {config_log_path}:")
                    sys.stderr.write(config_log_str + "\n")
                else:
                    logging.warning(f"File not found: {config_log_path}")
                raise RuntimeError("configure failed")

            configure_result = run_program(configure_cmd_line,
                                           shell=True,
                                           stdout_stderr_prefix='configure',
                                           error_ok=True)

        if is_verbose_mode() and configure_result.success():
            configure_result.print_output_to_stdout()

        configure_result.print_output_and_raise_error_if_failed()

        logging.info(
            "Successfully ran configure in the postgres build directory")
Example #20
 def clean_postgres(self):
     logging.info("Removing the postgres build and installation directories")
     for dir_to_delete in [self.pg_build_root, self.pg_prefix]:
         logging.info("Deleting the directory '%s'", dir_to_delete)
         # rm -rf is much faster than Python's rmtree.
         run_program(['rm', '-rf', dir_to_delete], capture_output=False)
Example #21
    def make_postgres(self):
        self.set_env_vars('make')
        make_cmd = ['make']

        make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
        if make_parallelism:
            make_parallelism = int(make_parallelism)
        if self.build_uses_remote_compilation and not self.remote_compilation_allowed:
            # Since we're building everything locally in this case, and YB_MAKE_PARALLELISM is
            # likely specified for distributed compilation, cap it at some factor times the number
            # of CPU cores.
            parallelism_cap = multiprocessing.cpu_count() * 2
            if make_parallelism:
                make_parallelism = min(parallelism_cap, make_parallelism)
            else:
                make_parallelism = parallelism_cap

        if make_parallelism:
            make_cmd += ['-j', str(int(make_parallelism))]

        os.environ['YB_COMPILER_TYPE'] = self.compiler_type

        # Create a script allowing to easily run "make" from the build directory with the right
        # environment.
        env_script_content = ''
        for env_var_name in [
                'YB_SRC_ROOT', 'YB_BUILD_ROOT', 'YB_BUILD_TYPE', 'CFLAGS',
                'CXXFLAGS', 'LDFLAGS', 'PATH'
        ]:
            env_var_value = os.environ[env_var_name]
            if env_var_value is None:
                raise RuntimeError("Expected env var %s to be set" %
                                   env_var_name)
            env_script_content += "export %s=%s\n" % (
                env_var_name, quote_for_bash(env_var_value))

        compile_commands_files = []

        for work_dir in [
                self.pg_build_root,
                os.path.join(self.pg_build_root, 'contrib')
        ]:
            with WorkDirContext(work_dir):
                # Create a script to run Make easily with the right environment.
                make_script_path = 'make.sh'
                with open(make_script_path, 'w') as out_f:
                    out_f.write('#!/usr/bin/env bash\n'
                                '. "${BASH_SOURCE%/*}"/env.sh\n'
                                'make "$@"\n')
                with open('env.sh', 'w') as out_f:
                    out_f.write(env_script_content)

                run_program(['chmod', 'u+x', make_script_path])

                # Actually run Make.
                if is_verbose_mode():
                    logging.info("Running make in the %s directory", work_dir)
                make_result = run_program(make_cmd)
                write_program_output_to_file('make', make_result, work_dir)
                make_install_result = run_program(['make', 'install'])
                write_program_output_to_file('make_install',
                                             make_install_result, work_dir)
                logging.info("Successfully ran make in the %s directory",
                             work_dir)

                if self.export_compile_commands:
                    logging.info(
                        "Generating the compilation database in directory '%s'",
                        work_dir)

                    compile_commands_path = os.path.join(
                        work_dir, 'compile_commands.json')
                    os.environ['YB_PG_SKIP_CONFIG_STATUS'] = '1'
                    if (not os.path.exists(compile_commands_path)
                            or not self.export_compile_commands_lazily):
                        run_program(['compiledb', 'make', '-n'],
                                    capture_output=False)
                    del os.environ['YB_PG_SKIP_CONFIG_STATUS']

                    if not os.path.exists(compile_commands_path):
                        raise RuntimeError(
                            "Failed to generate compilation database at: %s" %
                            compile_commands_path)
                    compile_commands_files.append(compile_commands_path)

        if self.export_compile_commands:
            self.combine_compile_commands(compile_commands_files)
Example #22
 def test_run_program_noerror(self):
     result = run_program("true")
     self.assertEquals(0, result.returncode)
     self.assertEquals('', result.stdout)
     self.assertEquals('', result.stderr)
Example #23
 def test_error_reporting(self):
     with self.assertRaises(ExternalProgramError):
         run_program('false')
Example #24
 def test_exit_codes(self):
     for exit_code in [0, 1, 2, 3, 10, 20, 100, 150, 200, 250, 255]:
         result = run_program("exit %d" % exit_code,
                              shell=True,
                              error_ok=exit_code != 0)
         self.assertEquals(exit_code, result.returncode)
Example #25
    def make_postgres(self):
        self.set_env_vars('make')
        # Postgresql requires MAKELEVEL to be 0 or non-set when calling its make.
        # But in case YB project is built with make, MAKELEVEL is not 0 at this point.
        make_cmd = ['make', 'MAKELEVEL=0']

        make_parallelism = os.environ.get('YB_MAKE_PARALLELISM')
        if make_parallelism:
            make_parallelism = int(make_parallelism)
        if self.build_uses_remote_compilation and not self.remote_compilation_allowed:
            # Since we're building everything locally in this case, and YB_MAKE_PARALLELISM is
            # likely specified for distributed compilation, cap it at some factor times the number
            # of CPU cores.
            parallelism_cap = multiprocessing.cpu_count() * 2
            if make_parallelism:
                make_parallelism = min(parallelism_cap, make_parallelism)
            else:
                make_parallelism = parallelism_cap

        if make_parallelism:
            make_cmd += ['-j', str(int(make_parallelism))]

        self.set_env_var('YB_COMPILER_TYPE', self.compiler_type)

        # Create a script allowing to easily run "make" from the build directory with the right
        # environment.
        env_script_content = ''
        for env_var_name in CONFIG_ENV_VARS:
            env_var_value = os.environ.get(env_var_name)
            if env_var_value is None:
                raise RuntimeError("Expected env var %s to be set" %
                                   env_var_name)
            env_script_content += "export %s=%s\n" % (
                env_var_name, quote_for_bash(env_var_value))

        compile_commands_files = []

        work_dirs = [self.pg_build_root]
        if self.build_type != 'compilecmds':
            work_dirs.append(os.path.join(self.pg_build_root, 'contrib'))

        for work_dir in work_dirs:
            with WorkDirContext(work_dir):
                # Create a script to run Make easily with the right environment.
                if self.should_build:
                    make_script_path = 'make.sh'
                    with open(make_script_path, 'w') as out_f:
                        out_f.write('#!/usr/bin/env bash\n'
                                    '. "${BASH_SOURCE%/*}"/env.sh\n'
                                    'make "$@"\n')
                    with open('env.sh', 'w') as out_f:
                        out_f.write(env_script_content)

                    run_program(['chmod', 'u+x', make_script_path])

                    # Actually run Make.
                    if is_verbose_mode():
                        logging.info("Running make in the %s directory",
                                     work_dir)
                    run_program(make_cmd,
                                stdout_stderr_prefix='make',
                                cwd=work_dir,
                                shell=True,
                                error_ok=True
                                ).print_output_and_raise_error_if_failed()
                if self.build_type == 'compilecmds':
                    logging.info(
                        "Not running make install in the %s directory since we are only "
                        "generating the compilation database", work_dir)
                else:
                    run_program('make install',
                                stdout_stderr_prefix='make_install',
                                cwd=work_dir,
                                shell=True,
                                error_ok=True
                                ).print_output_and_raise_error_if_failed()
                    logging.info("Successfully ran make in the %s directory",
                                 work_dir)

                if self.export_compile_commands:
                    logging.info(
                        "Generating the compilation database in directory '%s'",
                        work_dir)

                    compile_commands_path = os.path.join(
                        work_dir, 'compile_commands.json')
                    self.set_env_var('YB_PG_SKIP_CONFIG_STATUS', '1')
                    if (not os.path.exists(compile_commands_path)
                            or not self.export_compile_commands_lazily):
                        run_program(['compiledb', 'make', '-n'],
                                    capture_output=False)
                    del os.environ['YB_PG_SKIP_CONFIG_STATUS']

                    if not os.path.exists(compile_commands_path):
                        raise RuntimeError(
                            "Failed to generate compilation database at: %s" %
                            compile_commands_path)
                    compile_commands_files.append(compile_commands_path)

        if self.export_compile_commands:
            self.combine_compile_commands(compile_commands_files)
Example #26
 def _capture_error_log_from_cmd(self, cmd):
     with LogCapture(level=logging.ERROR) as captured_logs:
         run_program(cmd, error_ok=True, report_errors=True)
     return str(captured_logs).strip()