def build_postgres(self):
    """Build PostgreSQL unless the saved build stamp shows it is already current."""
    if self.args.clean:
        self.clean_postgres()
    mkdir_p(self.pg_build_root)
    self.set_env_vars('configure')

    saved_build_stamp = self.get_saved_build_stamp()
    initial_build_stamp = self.get_build_stamp(include_env_vars=True)
    initial_build_stamp_no_env = self.get_build_stamp(include_env_vars=False)
    logging.info("PostgreSQL build stamp:\n%s", initial_build_stamp)

    # Nothing to do if the current stamp matches the one saved by the last build.
    if initial_build_stamp == saved_build_stamp:
        logging.info(
            "PostgreSQL is already up-to-date in directory %s, not rebuilding.",
            self.pg_build_root)
        return

    with WorkDirContext(self.pg_build_root):
        self.sync_postgres_source()
        if os.environ.get('YB_PG_SKIP_CONFIGURE', '0') != '1':
            self.configure_postgres()
        self.make_postgres()

    # Only persist the stamp if the sources did not change while we were building.
    final_build_stamp_no_env = self.get_build_stamp(include_env_vars=False)
    if final_build_stamp_no_env == initial_build_stamp_no_env:
        logging.info("Updating build stamp file at %s", self.build_stamp_path)
        with open(self.build_stamp_path, 'w') as stamp_file:
            stamp_file.write(initial_build_stamp)
    else:
        logging.warning("PostgreSQL build stamp changed during the build! Not updating.")
def build_postgres(self):
    """Build PostgreSQL, running only the steps selected by should_configure/should_make.

    Uses a build stamp to skip work when nothing changed, except that an
    up-to-date tree is still processed when compile_commands.json is requested.
    """
    overall_start_sec = time.time()
    if self.args.clean:
        self.clean_postgres()
    mkdir_p(self.pg_build_root)
    self.set_env_vars('configure')

    saved_build_stamp = self.get_saved_build_stamp()
    initial_build_stamp = self.get_build_stamp(include_env_vars=True)
    initial_build_stamp_no_env = self.get_build_stamp(include_env_vars=False)
    logging.info("PostgreSQL build stamp:\n%s", initial_build_stamp)

    if initial_build_stamp == saved_build_stamp:
        if not self.export_compile_commands:
            logging.info(
                "PostgreSQL is already up-to-date in directory %s, skipping %s.",
                self.pg_build_root, self.steps_description())
            return
        # compile_commands.json is still needed, so keep going even though
        # the stamp says nothing changed.
        logging.info(
            "Even though PostgreSQL is already up-to-date in directory %s, we still need "
            "to create compile_commands.json, so proceeding with %s",
            self.pg_build_root, self.steps_description())

    with WorkDirContext(self.pg_build_root):
        if self.should_configure:
            self.sync_postgres_source()
            configure_start_sec = time.time()
            self.configure_postgres()
            logging.info(
                "The configure step of building PostgreSQL took %.1f sec",
                time.time() - configure_start_sec)
        if self.should_make:
            make_start_sec = time.time()
            self.make_postgres()
            logging.info(
                "The make step of building PostgreSQL took %.1f sec",
                time.time() - make_start_sec)

    if self.should_make:
        # Guard against the code having changed while we were building it.
        final_build_stamp_no_env = self.get_build_stamp(include_env_vars=False)
        if final_build_stamp_no_env == initial_build_stamp_no_env:
            logging.info("Updating build stamp file at %s", self.build_stamp_path)
            with open(self.build_stamp_path, 'w') as stamp_file:
                stamp_file.write(initial_build_stamp)
        else:
            logging.warning(
                "PostgreSQL build stamp changed during the build! Not updating.")

    logging.info("PostgreSQL build (%s) took %.1f sec",
                 self.steps_description(), time.time() - overall_start_sec)
def build_postgres(self):
    """Optionally clean, then sync, configure, and make PostgreSQL in pg_build_root."""
    if self.args.clean:
        self.clean_postgres()
    mkdir_p(self.pg_build_root)
    # Run all build steps from inside the PostgreSQL build directory.
    with WorkDirContext(self.pg_build_root):
        self.sync_postgres_source()
        self.configure_postgres()
        self.make_postgres()
def build_postgres(self):
    """Build PostgreSQL: clean if requested, then sync/configure/make in the build dir."""
    if self.args.clean:
        self.clean_postgres()
    mkdir_p(self.pg_build_root)
    with WorkDirContext(self.pg_build_root):
        # The three build phases, executed unconditionally and in order.
        for step in (self.sync_postgres_source,
                     self.configure_postgres,
                     self.make_postgres):
            step()
def main():
    """Create the initial sys catalog snapshot, unless skipped or already present.

    Controlled by environment variables:
      YB_SKIP_INITIAL_SYS_CATALOG_SNAPSHOT: '1' skips the whole step.
      YB_RECREATE_INITIAL_SYS_CATALOG_SNAPSHOT: '1' forces re-creation.

    Raises:
        RuntimeError: if the snapshot-creation tool reports failure.
    """
    if os.environ.get('YB_SKIP_INITIAL_SYS_CATALOG_SNAPSHOT', '0') == '1':
        logging.info(
            'YB_SKIP_INITIAL_SYS_CATALOG_SNAPSHOT is set, skipping initdb')
        return

    build_root = os.environ['YB_BUILD_ROOT']
    tool_name = 'create_initial_sys_catalog_snapshot'
    tool_path = os.path.join(build_root, 'tests-pgwrapper', tool_name)
    snapshot_dest_path = os.path.join(build_root, 'share',
                                      'initial_sys_catalog_snapshot')

    # The presence of this file is used as the marker of a completed snapshot.
    file_to_check = os.path.join(snapshot_dest_path,
                                 'exported_tablet_metadata_changes')
    if (os.path.exists(file_to_check) and os.environ.get(
            'YB_RECREATE_INITIAL_SYS_CATALOG_SNAPSHOT', '') != '1'):
        logging.info(
            "Initial sys catalog snapshot already exists, not re-creating: %s",
            snapshot_dest_path)
        return
    if os.path.exists(snapshot_dest_path):
        logging.info("Removing initial sys catalog snapshot data at: %s",
                     snapshot_dest_path)
        shutil.rmtree(snapshot_dest_path)
    mkdir_p(os.path.dirname(snapshot_dest_path))

    start_time_sec = time.time()
    logging.info("Starting creating initial system catalog snapshot data")
    logging.info("Logging to: %s", os.path.join(build_root, tool_name + '.err'))
    # Tell the test tool where to write the snapshot, via gtest flags.
    os.environ['YB_EXTRA_GTEST_FLAGS'] = (
        '--initial_sys_catalog_snapshot_dest_path=' + snapshot_dest_path)
    os.environ['YB_CTEST_VERBOSE'] = '1'
    with WorkDirContext(build_root):
        initdb_result = run_program(
            [
                os.path.join(YB_SRC_ROOT, 'build-support', 'run-test.sh'),
                tool_path,
            ],
            stdout_stderr_prefix=tool_name,
            shell=True,
            error_ok=True)
        elapsed_time_sec = time.time() - start_time_sec
        if initdb_result.failure():
            initdb_result.print_output_to_stdout()
            raise RuntimeError("initdb failed in %.1f sec" % elapsed_time_sec)
    # Fixed format spec: was '%1.f' (width 1, zero precision), which dropped the
    # fractional seconds; '%.1f' matches the failure message above.
    logging.info(
        "Initial system catalog snapshot data creation took %.1f sec. Wrote data to: %s",
        elapsed_time_sec, snapshot_dest_path)
def build_postgres(self):
    """Build PostgreSQL, using a build stamp to avoid redundant rebuilds.

    An up-to-date tree is still processed (with should_build disabled) when
    compile_commands.json export is requested.
    """
    build_start_sec = time.time()
    if self.args.clean:
        self.clean_postgres()
    mkdir_p(self.pg_build_root)
    self.set_env_vars('configure')

    saved_build_stamp = self.get_saved_build_stamp()
    initial_build_stamp = self.get_build_stamp(include_env_vars=True)
    initial_build_stamp_no_env = self.get_build_stamp(include_env_vars=False)
    logging.info("PostgreSQL build stamp:\n%s", initial_build_stamp)

    if initial_build_stamp == saved_build_stamp:
        logging.info(
            "PostgreSQL is already up-to-date in directory %s, not rebuilding.",
            self.pg_build_root)
        if not self.export_compile_commands:
            return
        # Still proceed to configure/make so compile_commands.json gets created.
        self.should_build = False
        logging.info("Still need to create compile_commands.json, proceeding.")

    with WorkDirContext(self.pg_build_root):
        # NOTE(review): nesting reconstructed from a collapsed source line; it
        # appears only the source sync is gated by should_build -- confirm.
        if self.should_build:
            self.sync_postgres_source()
        if os.environ.get('YB_PG_SKIP_CONFIGURE', '0') != '1':
            configure_start_sec = time.time()
            self.configure_postgres()
            logging.info("The configure step of building PostgreSQL took %.1f sec",
                         time.time() - configure_start_sec)
        make_start_sec = time.time()
        self.make_postgres()
        logging.info("The make step of building PostgreSQL took %.1f sec",
                     time.time() - make_start_sec)

    # Only persist the stamp if the sources did not change while building.
    final_build_stamp_no_env = self.get_build_stamp(include_env_vars=False)
    if final_build_stamp_no_env == initial_build_stamp_no_env:
        logging.info("Updating build stamp file at %s", self.build_stamp_path)
        with open(self.build_stamp_path, 'w') as stamp_file:
            stamp_file.write(initial_build_stamp)
    else:
        logging.warning("PostgreSQL build stamp changed during the build! Not updating.")

    logging.info("PostgreSQL build took %.1f sec", time.time() - build_start_sec)
def run_pvs_analyzer(self):
    """Run PVS-Studio over the combined compile commands and render CSV + HTML reports.

    Raises:
        IOError: if a required input file is missing, the analyzer produces no
            output file, or the analyzer exits with an unrecognized code.
    """
    pvs_config_path = os.path.join(self.args.build_root, 'PVS-Studio.cfg')
    rules_config_path = os.path.join(YB_SRC_ROOT, 'yugabytedb.pvsconfig')
    if not os.path.exists(rules_config_path):
        raise IOError(
            "PVS Studio rules configuration file does not exist: %s" % rules_config_path)
    # Generate the analyzer config pointing at our rules file.
    with open(pvs_config_path, 'w') as pvs_config_file:
        pvs_config_file.write(
            '# This file was automatically generated by %s.\n'
            'rules-config=%s\n' % (__file__, rules_config_path))

    pvs_output_dir = os.path.join(self.args.build_root, 'pvs_output')
    mkdir_p(pvs_output_dir)
    pvs_log_path = os.path.join(pvs_output_dir, 'pvs_results.log')
    if os.path.exists(pvs_log_path):
        logging.info("Removing existing file %s", pvs_log_path)
        os.remove(pvs_log_path)

    combined_raw_compile_commands_path = os.path.join(
        self.args.build_root, COMBINED_RAW_COMPILE_COMMANDS_FILE_NAME)
    if not os.path.exists(combined_raw_compile_commands_path):
        raise IOError("Raw compilation commands file does not exist: %s" %
                      combined_raw_compile_commands_path)

    # Optionally narrow the analysis to files matching a user-supplied regex.
    if self.args.file_name_regex:
        compile_commands_path = os.path.join(
            self.args.build_root, 'raw_compile_commands_filtered_for_analyze.json')
        filter_compile_commands(
            combined_raw_compile_commands_path, compile_commands_path,
            self.args.file_name_regex)
    else:
        compile_commands_path = combined_raw_compile_commands_path
    if not os.path.exists(compile_commands_path):
        raise IOError("Compilation commands file does not exist: %s" %
                      compile_commands_path)

    pvs_studio_analyzer_executable = find_executable('pvs-studio-analyzer',
                                                     must_find=True)
    plog_converter_executable = find_executable('plog-converter', must_find=True)

    analyzer_cmd_line = [
        pvs_studio_analyzer_executable, 'analyze',
        '--cfg', pvs_config_path,
        '--file', compile_commands_path,
        '--output-file', pvs_log_path,
        '-j', str(multiprocessing.cpu_count()),
        '--disableLicenseExpirationCheck'
    ]
    analyzer_exit_code = 0
    try:
        check_call_and_log(analyzer_cmd_line)
    except CalledProcessError as analyzer_error:
        analyzer_exit_code = analyzer_error.returncode
        if analyzer_exit_code in PVS_ANALYZER_EXIT_CODE_DETAILS:
            logging.info(
                "Details for PVS Studio Analyzer return code %d: %s",
                analyzer_exit_code,
                PVS_ANALYZER_EXIT_CODE_DETAILS[analyzer_exit_code])
        # BUG FIX: these raises previously passed the value as a second IOError
        # constructor argument ("...%s", value) instead of %-formatting it, so
        # the message was never interpolated; made consistent with the raises above.
        if not os.path.exists(pvs_log_path):
            raise IOError(
                "PVS Studio Analyzer failed to generate output file at %s" %
                pvs_log_path)
        if analyzer_exit_code not in PVS_ANALYZER_EXIT_CODE_DETAILS:
            raise IOError("Unrecognized PVS Studio Analyzer exit code: %s" %
                          analyzer_exit_code)

    # Render the raw log as a CSV task list.
    pvs_output_path = os.path.join(pvs_output_dir, 'pvs_tasks.csv')
    log_converter_cmd_line_csv = [
        plog_converter_executable,
        '--renderTypes', 'tasklist',
        '--output', pvs_output_path,
        pvs_log_path
    ]
    check_call_and_log(log_converter_cmd_line_csv)

    # Render the raw log as a browsable HTML report, replacing any old one.
    html_output_dir = os.path.join(pvs_output_dir, 'pvs_html')
    if os.path.exists(html_output_dir):
        logging.info("Deleting the existing directory %s", html_output_dir)
        rm_rf(html_output_dir)
    log_converter_cmd_line_html = [
        plog_converter_executable,
        '--renderTypes', 'fullhtml',
        '--srcRoot', YB_SRC_ROOT,
        '--output', html_output_dir,
        pvs_log_path
    ]
    check_call_and_log(log_converter_cmd_line_html)
def test_mkdir_p(self):
    """mkdir_p must create a directory and be a no-op when it already exists."""
    target_dir = '/tmp/yugabyte_mkdir_p_test_%d' % randint(1, 1000000000)
    # Second iteration exercises the already-exists (idempotent) path.
    for _ in range(2):
        mkdir_p(target_dir)
        self.assertTrue(os.path.isdir(target_dir))
    os.rmdir(target_dir)