def expand_value(self, old_value):
    """
    Expand old_value with the following changes:
    - Replace ${project.version} with the Java version from pom.xml.
    - Replace the leading "thirdparty/" with the respective YB_THIRDPARTY_DIR from the build.
    - Replace $BUILD_ROOT with the actual build_root.
    - Replace $THIRDPARTY_BUILD_SPECIFIC_DIR with the "installed" subdirectory of the
      thirdparty directory for the appropriate instrumentation flavor.

    Returns the expanded value; logs a message if any substitution changed it.
    """
    # Substitution for Java.
    new_value = old_value.replace('${project.version}', self.java_project_version)

    # Substitution for thirdparty.
    thirdparty_prefix_match = THIRDPARTY_PREFIX_RE.match(new_value)
    if thirdparty_prefix_match:
        new_value = os.path.join(get_thirdparty_dir(), thirdparty_prefix_match.group(1))

    # Substitution for BUILD_ROOT.
    new_value = new_value.replace("$BUILD_ROOT", self.build_root)

    # macOS thirdparty builds use a Clang-specific uninstrumented directory name.
    # (Fixed local-variable typo: "intrumentation" -> "instrumentation".)
    thirdparty_instrumentation = "clang_uninstrumented" if is_macos() else "uninstrumented"
    new_value = new_value.replace(
        "$THIRDPARTY_BUILD_SPECIFIC_DIR",
        os.path.join(get_thirdparty_dir(), "installed", thirdparty_instrumentation))

    if new_value != old_value:
        logging.info("Substituting '{}' -> '{}' in manifest".format(
            old_value, new_value))
    return new_value
def wait_for_clock_sync():
    """
    Block until the system clock is synchronized, as reported by is_clock_synchronized().

    This is a no-op on macOS (is_clock_synchronized asserts non-macOS). Otherwise, polls
    every 0.25 sec, logging progress roughly every CLOCK_SYNC_WAIT_LOGGING_INTERVAL_SEC
    seconds, and raises RuntimeError if the clock is still not synchronized after
    MAX_TIME_TO_WAIT_FOR_CLOCK_SYNC_SEC seconds.
    """
    if is_macos():
        return
    start_time = time.time()
    last_log_time = start_time
    waited_for_clock_sync = False
    check_result = is_clock_synchronized()
    while not check_result.is_synchronized:
        if not waited_for_clock_sync:
            logging.info("Clock not synchronized, waiting...")
            waited_for_clock_sync = True
        time.sleep(0.25)
        cur_time = time.time()
        if cur_time - last_log_time > CLOCK_SYNC_WAIT_LOGGING_INTERVAL_SEC:
            # Bug fix: report the total elapsed wait (since start_time), not the time
            # since the previous log line, which would always be ~the logging interval.
            logging.info("Waiting for clock to be synchronized for %.2f sec" %
                         (cur_time - start_time))
            last_log_time = cur_time
        if cur_time - start_time > MAX_TIME_TO_WAIT_FOR_CLOCK_SYNC_SEC:
            raise RuntimeError(
                "Waited for %.2f sec for clock synchronization, still not synchronized, "
                "check result: %s" % (cur_time - start_time, check_result))
        check_result = is_clock_synchronized()
    if waited_for_clock_sync:
        cur_time = time.time()
        # Bug fix: include the "sec" unit, consistent with the other messages.
        logging.info("Waited for %.2f sec for clock synchronization" %
                     (cur_time - start_time))
def init_spark_context(details=None):
    """
    Lazily initialize the global SparkContext for distributed test execution.

    Idempotent: returns immediately if spark_context already exists. Chooses the Spark
    master URL from (in priority order) g_spark_master_url_override, a build-type-specific
    default (macOS / ASAN-TSAN / regular Linux cluster), and finally the
    YB_SPARK_MASTER_URL environment variable override.

    details: optional list of strings appended to the Spark application name; the
    caller's list is not modified.
    """
    global spark_context
    if spark_context:
        return
    # Bug fix: the previous signature used a mutable default argument (details=[]) and
    # then mutated it with +=, so entries accumulated across calls and the caller's own
    # list was modified. Copy defensively instead.
    details = [] if details is None else list(details)
    build_type = yb_dist_tests.global_conf.build_type
    from pyspark import SparkContext
    # We sometimes fail tasks due to unsynchronized clocks, so we should tolerate a fair
    # number of retries.
    # https://stackoverflow.com/questions/26260006/are-failed-tasks-resubmitted-in-apache-spark
    # NOTE: we never retry failed tests to avoid hiding bugs. This failure tolerance
    # mechanism is just for the resilience of the test framework itself.
    SparkContext.setSystemProperty('spark.task.maxFailures', str(SPARK_TASK_MAX_FAILURES))
    spark_master_url = g_spark_master_url_override
    if spark_master_url is None:
        if is_macos():
            logging.info("This is macOS, using the macOS Spark cluster")
            spark_master_url = SPARK_URLS['macos']
        elif build_type in ['asan', 'tsan']:
            logging.info(
                "Using a separate Spark cluster for ASAN and TSAN tests")
            spark_master_url = SPARK_URLS['linux_asan_tsan']
        else:
            logging.info(
                "Using the regular Spark cluster for non-ASAN/TSAN tests")
            spark_master_url = SPARK_URLS['linux_default']

    logging.info("Spark master URL: %s", spark_master_url)
    # The environment variable, if set, takes precedence over everything else.
    spark_master_url = os.environ.get('YB_SPARK_MASTER_URL', spark_master_url)
    details += [
        'user: {}'.format(getpass.getuser()),
        'build type: {}'.format(build_type)
    ]
    if 'BUILD_URL' in os.environ:
        details.append('URL: {}'.format(os.environ['BUILD_URL']))
    spark_context = SparkContext(spark_master_url,
                                 "YB tests ({})".format(', '.join(details)))
    spark_context.addPyFile(yb_dist_tests.__file__)
def is_clock_synchronized():
    """
    Check whether the local system clock is NTP-synchronized.

    Linux-only (asserts we are not on macOS). Runs the `ntpstat` utility and considers
    the clock synchronized when its stdout starts with 'synchron'.

    Returns a ClockSyncCheckResult carrying both the boolean verdict and the raw
    command result for diagnostics.
    """
    assert not is_macos()
    ntpstat_result = command_util.run_program('ntpstat', error_ok=True)
    clock_is_synced = ntpstat_result.stdout.startswith('synchron')
    return ClockSyncCheckResult(
        is_synchronized=clock_is_synced,
        cmd_result=ntpstat_result)