def run(self, threads): """Run OpenSSL three times. Args: threads (int): The total number of threads on the system. Returns: If success, a dict containing (unit, run1, run2, run3, average, median). unit (str): Score units. run1 (list): A list of (encrypt, decrypt). encrypt (float): The encryption score for the first run. decrypt (float): The decryption score for the first run. run2 (list): A list of (encrypt, decrypt). encrypt (float): The encryption score for the second run. decrypt (float): The decryption score for the second run. run3 (list): A list of (encrypt, decrypt). encrypt (float): The encryption score for the third run. decrypt (float): The decryption score for the third run. average (list): A list of (encrypt, decrypt). encrypt (float): The encryption average of run1, run2, and run3. decrypt (float): The decryption average of run1, run2, and run3. median (list): A list of (encrypt, decrypt). encrypt (float): The encryption median of run1, run2, and run3. decrypt (float): The decryption median of run1, run2, and run3. Else, a dict containing (error). error (str): Error message. """ taskset_ids = self.__taskset_ids(threads) multi_num = self.__multi_num(threads, taskset_ids) bin_loc = self.openssl_dir + "/apps/openssl" results = { "aes-128-gcm": { "unit": "B/s", "score_size": 8192, "score_size_unit": "B", "test_bit_size": 128, "test": "AES-GCM", }, "aes-256-gcm": { "unit": "B/s", "score_size": 8192, "score_size_unit": "B", "test_bit_size": 256, "test": "AES-GCM", }, } shell_env = os.environ.copy() if "LD_LIBRARY_PATH" in shell_env: shell_env["LD_LIBRARY_PATH"] = "{}:{}".format( shell_env["LD_LIBRARY_PATH"], self.openssl_dir) else: shell_env["LD_LIBRARY_PATH"] = self.openssl_dir if not os.path.isfile(bin_loc): text = 'Could not find OpenSSL binaries at "{}".'.format(bin_loc) prettify.error_message(text) return {"error": text} logging.info( "Running OpenSSL on ids %s using a total of %d threads.", taskset_ids, multi_num, ) os.makedirs(self.results_dir, exist_ok=True) for test in results: encrypt_results = [] decrypt_results = [] cmd_base = "taskset -c {} {} speed -multi {} -evp {}".format( taskset_ids, bin_loc, multi_num, test) cmd_decrypt = cmd_base + " -decrypt" self.commands.append("Run: " + cmd_base) self.commands.append("Run: " + cmd_decrypt) for count in range(1, 4): run_num = "run" + str(count) encrypt_result_file = "{}/openssl_{}_encrypt_{}.txt".format( self.results_dir, test, run_num) decrypt_result_file = "{}/openssl_{}_decrypt_{}.txt".format( self.results_dir, test, run_num) cmd_decrypt = cmd_base + " -decrypt" logging.debug("Encrypt command: %s", cmd_base) logging.debug("LD_LIBRARY_PATH: %s", shell_env["LD_LIBRARY_PATH"]) optimize.prerun() time.sleep(10) encrypt_output = execute.output(cmd_base, environment=shell_env) file.write(encrypt_result_file, encrypt_output) logging.debug("Decrypt command: %s", cmd_base) logging.debug("LD_LIBRARY_PATH: %s", shell_env["LD_LIBRARY_PATH"]) optimize.prerun() time.sleep(10) decrypt_output = execute.output(cmd_decrypt, environment=shell_env) file.write(decrypt_result_file, decrypt_output) encrypt_scores = encrypt_output.rstrip().split("\n") decrypt_scores = decrypt_output.rstrip().split("\n") if not encrypt_scores: continue if not decrypt_scores: continue encrypt_score = encrypt_scores[-1].split()[6] decrypt_score = decrypt_scores[-1].split()[6] if "k" in encrypt_score: encrypt_score = re.sub(r"[^0-9.]", "", encrypt_score) if "k" in decrypt_score: decrypt_score = re.sub(r"[^0-9.]", "", decrypt_score) # The 'numbers' are in 1000s of 
bytes per second processed. encrypt_score = float(encrypt_score) * 1000.0 decrypt_score = float(decrypt_score) * 1000.0 encrypt_results.append(encrypt_score) decrypt_results.append(decrypt_score) results[test][run_num] = {} results[test][run_num]["encrypt"] = encrypt_score results[test][run_num]["decrypt"] = decrypt_score if encrypt_results and decrypt_results: results[test]["average"] = {} results[test]["average"]["encrypt"] = statistics.mean( encrypt_results) results[test]["average"]["decrypt"] = statistics.mean( decrypt_results) results[test]["median"] = {} results[test]["median"]["encrypt"] = statistics.median( encrypt_results) results[test]["median"]["decrypt"] = statistics.median( decrypt_results) results[test]["variance"] = {} results[test]["variance"]["encrypt"] = statistics.variance( encrypt_results) results[test]["variance"]["decrypt"] = statistics.variance( decrypt_results) results[test]["range"] = {} sorted_encrypt = sorted(encrypt_results) results[test]["range"]["encrypt"] = (sorted_encrypt[-1] - sorted_encrypt[0]) sorted_decrypt = sorted(decrypt_results) results[test]["range"]["decrypt"] = (sorted_decrypt[-1] - sorted_decrypt[0]) logging.info("OpenSSL results: %s", str(results)) return results
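
# A minimal, standalone sketch of the score parsing done above, assuming
# an "openssl speed -evp" summary line in its usual form (algorithm name
# followed by throughput columns such as "448.2k"). parse_speed_score is
# a hypothetical helper, not part of this module, and the sample line is
# illustrative rather than real benchmark output.
import re


def parse_speed_score(line, column=6):
    """Return the throughput in bytes/second from one summary line."""
    token = line.split()[column]
    # Strip the trailing "k" (and any other non-numeric characters).
    token = re.sub(r"[^0-9.]", "", token)
    # openssl speed reports 1000s of bytes per second processed.
    return float(token) * 1000.0


# parse_speed_score("aes-128-gcm 91.2k 101.1k 318.9k 414.8k 441.5k 448.2k")
# -> 448200.0
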
def run(self, threads): """Run YCSB with MySQL three times. Args: threads (int): The number of threads on the system. """ shell_env = os.environ.copy() maven_dir = self.src_dir + "/maven" error = False results = {"unit": {"throughput": "ops/sec", "latency": "us"}} if os.path.isdir(maven_dir): shell_env["M2_HOME"] = maven_dir shell_env["M2"] = maven_dir + "/bin" else: return {"error": "Maven not found."} mysql_dir = self.src_dir + "/mysql" mysql_data = mysql_dir + "/mysql-files" if not os.path.exists(mysql_data + "/ycsb"): text = 'Unable to find "ycsb" table in MySQL.' prettify.error_message(text) return {"error": text} os.makedirs(self.results_dir, exist_ok=True) # Start MySQL service subprocess.Popen( "{0}/bin/mysqld_safe --user=root --basedir={0} --datadir={1} " "--plugin-dir={0}/lib/plugin --pid-file=/tmp/mysql.pid " "--log-error=ycsb.err &".format(mysql_dir, mysql_data), cwd=mysql_dir, shell=True, env=shell_env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) time.sleep(20) read_latency_results = [] update_latency_results = [] throughput_results = [] run_cmd = ("./bin/ycsb run jdbc -s -P workloads/workloada -p " "db.driver=com.mysql.jdbc.Driver -p " "db.url=jdbc:mysql://localhost:3306/ycsb?useSSL=false -p " 'db.user=root -p db.passwd="" -threads {} -p ' "operationcount=1000000".format(threads)) self.commands.append("Run: " + run_cmd) for count in range(1, 4): run_num = "run" + str(count) result_file = "{}/ycsb-sql_{}.txt".format(self.results_dir, run_num) optimize.prerun() time.sleep(10) # Run YCSB output = execute.output(run_cmd, working_dir=self.ycsb_dir, environment=shell_env) file.write(result_file, output) if "UPDATE-FAILED" in output or "READ-FAILED" in output: error = True break throughput_line = grep.text(output, r"\[OVERALL\], Throughput\(ops/sec\),") if throughput_line: throughput = float(throughput_line[-1].split(",")[2].strip()) throughput_results.append(throughput) readlat_line = grep.text(output, r"\[READ\], 95thPercentileLatency\(us\),") if readlat_line: readlat = float(readlat_line[-1].split(",")[2].strip()) read_latency_results.append(readlat) updatelat_line = grep.text( output, r"\[UPDATE\], 95thPercentileLatency\(us\),") if updatelat_line: updatelat = float(updatelat_line[-1].split(",")[2].strip()) update_latency_results.append(updatelat) if throughput_line and readlat_line and updatelat_line: results[run_num] = { "throughput": throughput, "read_latency": readlat, "update_latency": updatelat, } # Stop MySQL service if os.path.exists("/tmp/mysql.pid"): pid = file.read("/tmp/mysql.pid").strip() execute.kill(pid) execute.kill(pid) execute.kill(pid) if error: return {"error": "YCSB failed to update and/or read database."} if "run1" in results: results["average"] = {} results["median"] = {} results["variance"] = {} results["range"] = {} results["average"]["throughput"] = statistics.mean( throughput_results) results["median"]["throughput"] = statistics.median( throughput_results) results["variance"]["throughput"] = statistics.variance( throughput_results) sorted_throughput = sorted(throughput_results) results["range"]["throughput"] = (sorted_throughput[-1] - sorted_throughput[0]) results["average"]["read_latency"] = statistics.mean( read_latency_results) results["median"]["read_latency"] = statistics.median( read_latency_results) results["variance"]["read_latency"] = statistics.variance( read_latency_results) sorted_read_latency = sorted(read_latency_results) results["range"]["read_latency"] = (sorted_read_latency[-1] - sorted_read_latency[0]) 
results["average"]["update_latency"] = statistics.mean( update_latency_results) results["median"]["update_latency"] = statistics.median( update_latency_results) results["variance"]["update_latency"] = statistics.variance( update_latency_results) sorted_update_latency = sorted(update_latency_results) results["range"]["update_latency"] = (sorted_update_latency[-1] - sorted_update_latency[0]) logging.info("YCSB MySQL results: %s", str(results)) return results
def run(self, cores=None, cflags=None): """Run three timed Linux kernel compilations. Args: cores (int, optional): The number of cores on the system. Returns: If success, a dict containing (unit, run1, run2, run3, average, median). unit (str): Score units. run1 (float): Score for the first run. run2 (float): Score for the second run. run3 (float): Score for the third run. average (float): Average of run1, run2, and run3. median (float): Median of run1, run2, and run3. Else, a dict containing (error). error (str): Error message. """ if cores is None: cores = 1 if cflags is None: cflags = "-march=native -mtune=native" if "-O" not in cflags: cflags += " -O3 " shell_env = os.environ.copy() shell_env["CFLAGS"] = cflags results = {"unit": "s"} config_loc = self.kernel_dir + "/.config" tmp_results = [] if not os.path.isfile(config_loc): text = ('Cannot run timed Linux kernel because "{}" could not ' "be found.".format(config_loc)) prettify.error_message(text) return {"error": text} logging.info( "Running timed Linux kernel compilation using %d Make " "thread.", cores) os.makedirs(self.results_dir, exist_ok=True) clean_cmd = "make -s -j {} clean".format(cores) build_cmd = "make -s -j {}".format(cores) self.commands.append("Run: CFLAGS = " + cflags) self.commands.append("Prerun: " + clean_cmd) self.commands.append("Run: " + build_cmd) for count in range(1, 4): run_num = "run" + str(count) result_file = "{}/zlib_{}.txt".format(self.results_dir, run_num) execute.output(clean_cmd, self.kernel_dir, environment=shell_env) optimize.prerun() time.sleep(10) compile_speed = execute.timed(build_cmd, working_dir=self.kernel_dir, environment=shell_env) if (not os.path.isfile(self.kernel_dir + "/vmlinux") or compile_speed is None): return {"error": "Linux Kernel failed to compile."} file.write( result_file, "{}\nLinux Kernel Compilation Speed: {}\n".format( build_cmd, compile_speed), ) results[run_num] = float(compile_speed) tmp_results.append(compile_speed) if tmp_results: results["average"] = statistics.mean(tmp_results) results["median"] = statistics.median(tmp_results) results["variance"] = statistics.variance(tmp_results) sorted_results = sorted(tmp_results) results["range"] = sorted_results[-1] - sorted_results[0] logging.info("Timed Linux kernel compilation results:\n%s", str(results)) return results
def run(self, threads): """Run YCSB with Cassandra three times. Args: threads (int): The number of threads on the system. """ pid = None shell_env = os.environ.copy() maven_dir = self.src_dir + "/maven" error = False results = {"unit": {"throughput": "ops/sec", "latency": "us"}} if os.path.isdir(maven_dir): shell_env["M2_HOME"] = maven_dir shell_env["M2"] = maven_dir + "/bin" else: prettify.error_message("Maven could not be found.") return False cassandra_dir = self.src_dir + "/cassandra" if not os.path.exists(cassandra_dir + "/data/data/ycsb"): text = 'Unable to find "ycsb" table in Cassandra.' prettify.error_message(text) return {"error": text} read_latency_results = [] update_latency_results = [] throughput_results = [] os.makedirs(self.results_dir, exist_ok=True) # Start Cassandra service subprocess.Popen( "./bin/cassandra -R -p /tmp/cassandra.pid &", shell=True, cwd=cassandra_dir, env=shell_env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) time.sleep(20) if os.path.isfile("/tmp/cassandra.pid"): pid = file.read("/tmp/cassandra.pid").strip() if not pid or not os.path.dirname("/proc/" + pid): text = "Cassandra failed to start." prettify.error_message(text) return {"error": text} run_cmd = ("./bin/ycsb run cassandra-cql -s -P workloads/workloada " '-p hosts="localhost" -threads {} ' "-p operationcount=10000000".format(threads)) self.commands.append("Run: " + run_cmd) for count in range(1, 4): run_num = "run" + str(count) result_file = "{}/ycsb-nosql_{}.txt".format(self.results_dir, run_num) optimize.prerun() time.sleep(10) output = execute.output(run_cmd, working_dir=self.ycsb_dir, environment=shell_env) file.write(result_file, output) if "UPDATE-FAILED" in output or "READ-FAILED" in output: error = True break throughput_line = grep.text(output, r"\[OVERALL\], Throughput\(ops/sec\),") if throughput_line: throughput = float(throughput_line[-1].split(",")[2].strip()) throughput_results.append(throughput) readlat_line = grep.text(output, r"\[READ\], 95thPercentileLatency\(us\),") if readlat_line: readlat = float(readlat_line[-1].split(",")[2].strip()) read_latency_results.append(readlat) updatelat_line = grep.text( output, r"\[UPDATE\], 95thPercentileLatency\(us\),") if updatelat_line: updatelat = float(updatelat_line[-1].split(",")[2].strip()) update_latency_results.append(updatelat) if throughput_line and readlat_line and updatelat_line: results[run_num] = { "throughput": throughput, "read_latency": readlat, "update_latency": updatelat, } # Stop Cassandra service if pid: execute.kill(pid) execute.kill(pid) execute.kill(pid) if error: return {"error": "YCSB failed to update and/or read database."} if "run1" in results: results["average"] = {} results["median"] = {} results["variance"] = {} results["range"] = {} results["average"]["throughput"] = statistics.mean( throughput_results) results["median"]["throughput"] = statistics.median( throughput_results) results["variance"]["throughput"] = statistics.variance( throughput_results) sorted_throughput = sorted(throughput_results) results["range"]["throughput"] = (sorted_throughput[-1] - sorted_throughput[0]) results["average"]["read_latency"] = statistics.mean( read_latency_results) results["median"]["read_latency"] = statistics.median( read_latency_results) results["variance"]["read_latency"] = statistics.variance( read_latency_results) sorted_read_latency = sorted(read_latency_results) results["range"]["read_latency"] = (sorted_read_latency[-1] - sorted_read_latency[0]) results["average"]["update_latency"] = statistics.mean( 
update_latency_results) results["median"]["update_latency"] = statistics.median( update_latency_results) results["variance"]["update_latency"] = statistics.variance( update_latency_results) sorted_update_latency = sorted(update_latency_results) results["range"]["update_latency"] = (sorted_update_latency[-1] - sorted_update_latency[0]) logging.info("YCSB Cassandra results: %s", str(results)) return results
def run(self, threads): """Run GCC compiled STREAM three times. Args: threads (int): The total number of threads on the system. Returns: If success, a dict containing (unit, run1, run2, run3, average, median). unit (str): Score units. run1 (float): Score for the first run. run2 (float): Score for the second run. run3 (float): Score for the third run. average (float): Average of run1, run2, and run3. median (float): Median of run1, run2, and run3. Else, a dict containing (error). error (str): Error message. """ stream_bin = self.stream_dir + "/stream" shell_env = os.environ.copy() shell_env["OMP_NUM_THREADS"] = str(threads) mpi_root = self.src_dir + "/openmpi/build" mpi_path = mpi_root + "/bin" mpi_lib = mpi_root + "/lib" shell_env["PATH"] += ":" + mpi_path if "LD_LIBRARY_PATH" in shell_env: shell_env["LD_LIBRARY_PATH"] += mpi_lib else: shell_env["LD_LIBRARY_PATH"] = mpi_lib results = {"unit": "MB/s"} shell_env["OMP_PROC_BIND"] = "true" if not os.path.isfile(stream_bin): text = 'Cannot run STREAM because "{}" could not be found.'.format( stream_bin) prettify.error_message(text) return {"error": text} logging.info("Running STREAM with %d OMP threads.", threads) os.makedirs(self.results_dir, exist_ok=True) tmp_results = [] cmd = "./stream" self.commands.append("Run: OMP_NUM_THREADS = " + str(threads)) self.commands.append("Run: OMP_PROC_BIND = true") self.commands.append("Run: " + cmd) for count in range(1, 4): run_num = "run" + str(count) result_file = "{}/stream_{}.txt".format(self.results_dir, run_num) optimize.prerun() time.sleep(10) output = execute.output(cmd, working_dir=self.stream_dir, environment=shell_env) file.write(result_file, output) result = grep.text(output, "Triad") result = result[0].split()[1] # 2nd word result = float(result) results[run_num] = result tmp_results.append(result) results["average"] = statistics.mean(tmp_results) results["median"] = statistics.median(tmp_results) results["variance"] = statistics.variance(tmp_results) sorted_results = sorted(tmp_results) results["range"] = sorted_results[-1] - sorted_results[0] logging.info("STREAM results: %s", str(results)) return results
def run(self): """Run zlib compression (level 6) and decompression three times. Returns: If success, a dict containing (unit, run1, run2, run3, average, median). unit (str): Score units. run1 (list): A list of (compress, decompress). compress (float): The compression score for the first run. decompress (float): The decompression score for the first run. run2 (list): A list of (compress, decompress). compress (float): The compression score for the second run. decompress (float): The decompression score for the second run. run3 (list): A list of (compress, decompress). compress (float): The compression score for the third run. decompress (float): The decompression score for the third run. average (list): A list of (compress, decompress). compress (float): The compression average of run1, run2, and run3. decompress (float): The decompression average of run1, run2, and run3. median (list): A list of (compress, decompress). compress (float): The compression median of run1, run2, and run3. decompress (float): The decompression median of run1, run2, and run3. Else, a dict containing (error). error (str): Error message. """ bin32_loc = self.zlib_dir + "/minigzip" bin64_loc = self.zlib_dir + "/minigzip64" level = 6 corpus_file = self.corpus_dir + "/corpus.txt" corpus_archive = corpus_file + ".zlib" results = {"unit": "s"} compress_times = [] decompress_times = [] if not os.path.isfile(bin32_loc) or not os.path.isfile(bin64_loc): text = ('Cannot run zlib because neither "{}" or "{}" could not be' " found.".format(bin32_loc, bin64_loc)) prettify.error_message(text) return {"error": text} if not os.path.isfile(corpus_file): self.create_corpus() logging.info("Running zlib.") used_bin = bin64_loc if not os.path.isfile(bin64_loc): used_bin = bin32_loc os.makedirs(self.results_dir, exist_ok=True) compress_warmup = "{} -1 < {} > /dev/null".format(used_bin, corpus_file) compress_cmd = "{} -{} < {} > {}".format(used_bin, level, corpus_file, corpus_archive) decompress_warmup = "{} -d < {} > /dev/null".format( used_bin, corpus_archive) decompress_cmd = "{} -d < {} > /dev/null".format( used_bin, corpus_archive) self.commands.append("Run - Warmup: " + compress_warmup) self.commands.append("Run: " + compress_cmd) self.commands.append("Run - Warmup: " + decompress_warmup) self.commands.append("Run: " + decompress_cmd) for count in range(1, 4): run_num = "run" + str(count) result_file = "{}/zlib_{}.txt".format(self.results_dir, run_num) optimize.prerun() time.sleep(10) # warm up execute.output(compress_warmup, self.corpus_dir) compress_time = execute.timed(compress_cmd, self.corpus_dir) optimize.prerun() time.sleep(10) # warm up execute.output(decompress_warmup, self.corpus_dir) decompress_time = execute.timed(decompress_cmd, self.corpus_dir) file.write( result_file, "Compress Time (Level {}): {}\n" "Decompress Time: {}\n".format(level, compress_time, decompress_time), ) compress_times.append(compress_time) decompress_times.append(decompress_time) results[run_num] = {} results[run_num]["compress"] = compress_time results[run_num]["decompress"] = decompress_time os.remove(corpus_archive) results["average"] = {} results["average"]["compress"] = statistics.mean(compress_times) results["average"]["decompress"] = statistics.mean(decompress_times) results["median"] = {} results["median"]["compress"] = statistics.median(compress_times) results["median"]["decompress"] = statistics.median(decompress_times) results["variance"] = {} results["variance"]["compress"] = statistics.variance(compress_times) 
results["variance"]["decompress"] = statistics.variance( decompress_times) results["range"] = {} sorted_compress_times = sorted(compress_times) sorted_decompress_times = sorted(decompress_times) results["range"]["compress"] = (sorted_compress_times[-1] - sorted_compress_times[0]) results["range"]["decompress"] = (sorted_decompress_times[-1] - sorted_decompress_times[0]) logging.info("zlib results: %s", str(results)) return results
def run(self, cores=None, cflags=None): """Runs Docker containers to compile the Linux kernel. Returns: If success, a dict containing (unit, times, average, median, variance, range). unit (str): Score units. times (list): All compile times for the kernel. average (float): Average of the times. median (float): Median of the times. variance (float): Variance of the times. range (float): Range of the times. Else, a dict containing (error). error (str): Error message. """ if cores is None: cores = 1 if cflags is None: cflags = "-march=native -mtune=native" if "-O" not in cflags: cflags += " -O3 " shell_env = os.environ.copy() shell_env["CFLAGS"] = cflags shell_env["PATH"] = self.docker_dir + ":" + shell_env["PATH"] pid_file = "/tmp/docker.pid" build_name = "compile_kernel" result_file = self.results_dir + "/times.txt" results = {"unit": "s"} times = [] procs = [] os.makedirs(self.results_dir, exist_ok=True) shutil.copyfile(self.docker_dir + "/Dockerfile", self.results_dir + "/Dockerfile") if not os.path.isfile(self.docker_dir + "/dockerd"): message = "Cannot build. Docker directory not found." prettify.error_message(message) return {"error": message} # Start Docker daemon subprocess.Popen( "{}/dockerd --pidfile {} --data-root {} &".format( self.docker_dir, pid_file, self.data_dir), cwd=self.docker_dir, shell=True, env=shell_env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) logging.info("Docker daemon is running.") time.sleep(20) if not self.__image_built(build_name, env=shell_env): if os.path.exists(pid_file): pid = file.read(pid_file).strip() execute.kill(pid) message = "Cannot build. Docker image not found." prettify.error_message(message) return {"error": message} logging.info("Docker is about to run.") # Remove all previously ran containers try: containers = execute.output( "{}/docker ps -a -q".format(self.docker_dir), working_dir=self.docker_dir, environment=shell_env, ) if containers: execute.output( "{0}/docker rm $({0}/docker ps -a -q)".format( self.docker_dir), working_dir=self.docker_dir, environment=shell_env, ) except subprocess.SubprocessError as err: logging.debug(err) optimize.prerun() time.sleep(10) for count in range(0, 100): test_name = build_name + "_test{}".format(count) # Note: We avoid using `-i -t` because it causes TTY issues # with SSH connections. 
run_command = ("{}/docker run --ulimit nofile=1048576:1048576 " '-e "cores={}" -e "cflags={}" --name {} {}'.format( self.docker_dir, cores, cflags, test_name, build_name)) if count == 0: self.commands.append("Run: " + run_command) proc = subprocess.Popen( run_command, shell=True, cwd=self.docker_dir, env=shell_env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, ) procs.append(proc) for proc in procs: stdout = proc.communicate()[0] if isinstance(stdout, bytes): stdout = stdout.decode() stdout = stdout.strip() try: stdout = float(stdout) file.write(result_file, "{}\n".format(stdout), append=True) times.append(stdout) except ValueError: logging.debug("Container failed to finish.") logging.debug(stdout) # Remove all previously ran containers try: containers = execute.output( "{}/docker ps -a -q".format(self.docker_dir), working_dir=self.docker_dir, environment=shell_env, ) if containers: execute.output( "{0}/docker stop $({0}/docker ps -a -q)".format( self.docker_dir), working_dir=self.docker_dir, environment=shell_env, ) execute.output( "{0}/docker rm $({0}/docker ps -a -q)".format( self.docker_dir), working_dir=self.docker_dir, environment=shell_env, ) except subprocess.SubprocessError as err: logging.debug(err) # Stop Docker daemon if os.path.exists(pid_file): logging.info("Docker daemon is turning off.") pid = file.read(pid_file).strip() execute.kill(pid) execute.kill(pid) time.sleep(5) if times: results["times"] = times results["median"] = statistics.median(times) results["average"] = statistics.mean(times) results["variance"] = statistics.variance(times) sorted_times = sorted(times) results["range"] = sorted_times[-1] - sorted_times[0] else: results["error"] = "No container times available." return results
def run(self, mpi_threads, threads, arch=None):
    """Run High-Performance Linpack.

    Args:
        mpi_threads (int): The number of MPI threads used by LINPACK.
            This number is usually the number of physical cores on the
            system.
        threads (int): The total number of logical threads on the
            system.
        arch (str, optional): The architecture type of the system.

    Returns:
        If success, a dict containing (unit, mathlib, score).
            unit (str): Score units.
            mathlib (str): The math library HPL was built against.
            score (float): The best score across all reported runs.
        Else, a dict containing (error).
            error (str): Error message.
    """
    if arch is None:
        arch = "x86_64"

    shell_env = os.environ.copy()
    openmpi_dir = "{}/openmpi/build/bin".format(self.src_dir)
    bin_dir = "{}/bin/{}".format(self.hpl_dir, arch)
    bin_loc = bin_dir + "/xhpl"
    results = {"unit": "GFLOPS", "mathlib": self.mathlib}
    tmp_results = []

    if not os.path.isfile(bin_loc):
        text = 'Could not find HPL binaries at "{}".'.format(bin_loc)
        prettify.error_message(text)
        logging.error(text)
        return {"error": text}

    if not os.path.isdir(openmpi_dir):
        text = 'Could not find OpenMPI directory at "{}".'.format(
            openmpi_dir)
        prettify.error_message(text)
        logging.error(text)
        return {"error": text}

    grid = self.__grid(mpi_threads)
    nb_size = self.__nb_size(threads)

    mpi_cmd = ("{}/mpirun -n {} --allow-run-as-root "
               "--mca mpi_paffinity_alone 1".format(openmpi_dir,
                                                    mpi_threads))
    if threads == mpi_threads:
        mpi_cmd = "{}/mpirun -n {} --allow-run-as-root".format(
            openmpi_dir, mpi_threads)

    logging.info('Running LINPACK using "%s" arch.', arch)

    os.makedirs(self.results_dir, exist_ok=True)

    shutil.copyfile(self.hpl_dir + "/Make." + arch,
                    self.results_dir + "/Make." + arch)
    shutil.copyfile(self.hpl_dir + "/bin/{}/HPL.dat".format(arch),
                    self.results_dir + "/HPL.dat")

    cmd = mpi_cmd + " ./xhpl"

    self.commands.append("Run: " + cmd)

    optimize.prerun()
    time.sleep(10)

    output = execute.output(cmd, working_dir=bin_dir,
                            environment=shell_env)

    file.write(self.results_dir + "/linpack_output.txt", output)

    result = grep.text(
        output, r"\s+{}\s+{}\s+{}\s+".format(nb_size, grid.P, grid.Q))

    for line in result:
        # 7th word: the GFLOPS score
        tmp = float(line.split()[6])
        tmp_results.append(tmp)

    if tmp_results:
        sorted_results = sorted(tmp_results)
        results["score"] = sorted_results[-1]

    logging.info("LINPACK results: %s", str(results))

    return results
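
# The __grid helper is project-specific; HPL expects a P x Q process
# grid with P * Q equal to the MPI rank count, usually chosen as close
# to square as possible with P <= Q. A minimal sketch of that selection
# (make_grid is a hypothetical helper, not part of this module):
import math
from collections import namedtuple

Grid = namedtuple("Grid", ["P", "Q"])


def make_grid(mpi_threads):
    """Pick the most square P x Q factorization of mpi_threads."""
    best = Grid(1, mpi_threads)
    for p in range(2, int(math.sqrt(mpi_threads)) + 1):
        if mpi_threads % p == 0:
            best = Grid(p, mpi_threads // p)
    return best


# make_grid(16) -> Grid(P=4, Q=4); make_grid(6) -> Grid(P=2, Q=3)
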