def install(self, cores=None):
    """Installs glibc into /usr/local/glibc.

    Args:
        cores (int, optional): The number of cores on the system.

    Returns:
        Boolean: True if installation was successful otherwise False.
    """
    cores = 1 if cores is None else cores

    build_dir = "{}/build".format(self.glibc_dir)
    bin_loc = "/usr/local/glibc/lib/ld-{}.so".format(self.version)

    # Already installed: the loader binary exists.
    if os.path.isfile(bin_loc):
        return True

    if not os.path.isdir(build_dir):
        prettify.error_message(
            'Cannot install glibc because "{}" could not be found.'.format(
                build_dir))
        return False

    logging.info("Installing glibc using %d Make threads.", cores)

    execute.output("sudo -E make -j {} install".format(cores), build_dir)

    # Success is judged by the loader binary appearing on disk.
    return os.path.isfile(bin_loc)
    def build(self, threads, cores=None, cflags=None, avx512=None):
        """Compiles OpenBLAS.

        Args:
            threads (int): The number of threads on the system.
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.
            avx512 (bool, optional): Whether to enable AVX-512 CFLAGS.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        cores = 1 if cores is None else cores
        cflags = "-march=native -mtune=native" if cflags is None else cflags
        if "-O" not in cflags:
            cflags += " -O3 "
        if avx512 is True:
            cflags += (" -mavx512f -mavx512cd -mavx512bw -mavx512dq -mavx512vl"
                       " -mavx512ifma -mavx512vbmi ")

        library_path = self.openblas_dir + "/libopenblas.so"
        mpi_bin_dir = self.src_dir + "/openmpi/build/bin"
        env = os.environ.copy()
        env["CFLAGS"] = cflags
        env["OMP_NUM_THREADS"] = str(threads)

        # Skip the build when the shared library is already present.
        if os.path.isfile(library_path):
            return True

        if not os.path.isdir(self.openblas_dir):
            prettify.error_message(
                'Cannot compile OpenBLAS because "{}" could not be found.'.
                format(self.openblas_dir))
            return False

        logging.info(
            "Compiling OpenBLAS using %d OMP threads, %d Make threads, "
            'and "%s" CFLAGS.',
            threads,
            cores,
            cflags,
        )

        # Build against the project-local OpenMPI compiler wrappers.
        execute.output(
            "make -j {} FC={} CC={} USE_OPENMP=1 USE_THREAD=1".format(
                cores, mpi_bin_dir + "/mpifort", mpi_bin_dir + "/mpicc"),
            self.openblas_dir,
            environment=env,
        )

        # The library only exists when the compile succeeded.
        return os.path.isfile(library_path)
    def build(self, cores=None, cflags=None):
        """Compiles glibc in an out-of-tree build directory.

        Args:
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        if cores is None:
            cores = 1
        if cflags is None:
            cflags = ""
        # `-ffast-math` cannot be used to compile glibc
        if "-Ofast" in cflags:
            cflags = cflags.replace("-Ofast", "")
        if "-ffast-math" in cflags:
            cflags = cflags.replace("-ffast-math", "")
        # `-O3` fails sometimes
        if "-O3" in cflags:
            cflags = cflags.replace("-O3", "")
        # Optimizations are needed for glibc
        if "-O" not in cflags:
            cflags += " -O2 "

        shell_env = os.environ.copy()
        shell_env["CFLAGS"] = cflags
        build_dir = self.glibc_dir + "/build"
        bin_loc = build_dir + "/libc.so"

        # A previously built libc means there is nothing to do.
        if os.path.isfile(bin_loc):
            return True

        if not os.path.isdir(self.glibc_dir):
            prettify.error_message(
                'Cannot compile glibc because "{}" could not be found.'.format(
                    self.glibc_dir))
            return False

        logging.info('Compiling glibc using %d Make threads and "%s" CFLAGS',
                     cores, cflags)

        os.makedirs(build_dir, exist_ok=True)

        # Fix: the configure script lives in the glibc source root, one level
        # above the out-of-tree build directory (the previous command used a
        # corrupted "spet.lib./configure" path). This mirrors the OpenMPI
        # build method, which also runs "../configure" from its build dir.
        execute.output(
            "../configure --prefix=/usr/local/glibc",
            build_dir,
            environment=shell_env,
        )
        execute.output("make -j " + str(cores),
                       build_dir,
                       environment=shell_env)

        return os.path.isfile(bin_loc)
    def __node_latency_statistics(results):
        """Average, median, variance, and range for each NUMA node.

        Args:
            results (dict): All results for a MLC run.

        Returns:
            Original results with the added |average|, |median|, |variance|,
            and |range| keys and values.
        """
        if "run1" not in results:
            prettify.error_message(
                "Cannot calculate the node statistics for MLC because the "
                "results list is empty.")
            return results

        averages = []
        medians = []
        variances = []
        ranges = []

        logging.debug("Computing node statistics.")

        # One statistic per NUMA node, aggregated across every "runN" entry.
        for node in range(len(results["run1"])):
            logging.debug("node: %s", repr(node))
            latencies = []
            for run in results:
                logging.debug("run: %s", repr(run))
                if "run" not in run:
                    # Skip non-run keys such as "unit".
                    continue
                logging.debug("Appending: %s", repr(float(results[run][node])))
                latencies.append(float(results[run][node]))

            averages.append(statistics.mean(latencies))
            medians.append(statistics.median(latencies))
            variances.append(statistics.variance(latencies))
            ranges.append(max(latencies) - min(latencies))

        logging.debug("Averages:\n%s", repr(averages))
        results["average"] = averages

        logging.debug("Medians:\n%s", repr(medians))
        results["median"] = medians

        logging.debug("Variances:\n%s", repr(variances))
        results["variance"] = variances

        logging.debug("Ranges:\n%s", repr(ranges))
        results["range"] = ranges

        return results
# Example #5
    def setup(self, cores=None, cflags=None):
        """Setup the Linux kernel config file.

        Args:
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if setup was successful otherwise False.
        """
        cores = 1 if cores is None else cores
        cflags = "-march=native -mtune=native" if cflags is None else cflags
        if "-O" not in cflags:
            cflags += " -O3 "

        config_loc = self.kernel_dir + "/.config"
        env = os.environ.copy()
        env["CFLAGS"] = cflags

        # An existing .config means setup already ran.
        if os.path.isfile(config_loc):
            return True

        if not os.path.isdir(self.kernel_dir):
            prettify.error_message(
                'Cannot configure the Linux kernel because "{}" could not be'
                " found.".format(self.kernel_dir))
            return False

        logging.info(
            "Setting up the Linux kernel with %d Make threads, "
            'and "%s" CFLAGS.',
            cores,
            str(env["CFLAGS"]),
        )

        cmd = "make -s -j {0} defconfig && make -s -j {0} clean".format(cores)

        build_output = execute.output(cmd,
                                      working_dir=self.kernel_dir,
                                      environment=env)
        logging.debug("Build output:\n%s", build_output)

        self.commands.append("Setup: CFLAGS = " + cflags)
        self.commands.append("Setup: " + cmd)

        # defconfig must have produced the .config file.
        return os.path.isfile(config_loc)
def unknown(packages):
    """Unknown package manager.

    Args:
        packages (list): Package names for user to install.
    """
    logging.warning("Unknown package manager.")
    prettify.error_message(
        "The appropriate package manager for your system could not be found")
    print(
        "Please try manually installing the following and rerun this program:")

    # List each package on its own line for the user.
    for package_name in packages:
        print(package_name)
    def run(self):
        """Run the MLC latency matrix.

        Returns:
            If success: A dict containing (unit, latencies).

                unit (str): Latency units.
                latencies (list): Latency for each NUMA node, parsed from the
                    first matrix row of the MLC output.

            If error: A dict containing (error).
                error (str): Error message.
        """
        bin_loc = self.mlc_dir + "/Linux/mlc_avx512"
        cmd = "modprobe msr; {} --latency_matrix".format(bin_loc)
        results = {"unit": "ns"}

        if not os.path.isfile(bin_loc):
            text = 'Cannot run MLC because "{}" could not be found.'.format(
                bin_loc)
            prettify.error_message(text)
            return {"error": text}

        os.makedirs(self.results_dir, exist_ok=True)
        self.commands.append("Run: " + cmd)

        output = execute.output(cmd, self.mlc_dir)
        file.write(self.results_dir + "/mlc_output.txt", output)

        # Rows of the latency matrix start with the node index; grab node 0's
        # row and drop the leading index column.
        matched = grep.text(output, r"^\s*0")
        if matched:
            fields = matched[0].strip().split()
            fields.pop(0)  # Remove leading '0' for first node
            results["latencies"] = [float(value) for value in fields]

        logging.info("MLC results: %s", str(results))
        return results
# Example #8
    def build(self, cores=None, cflags=None):
        """Compiles zlib.

        Args:
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        cores = 1 if cores is None else cores
        cflags = "-march=native -mtune=native" if cflags is None else cflags
        if "-O" not in cflags:
            cflags += " -O3 "

        minigzip32 = self.zlib_dir + "/minigzip"
        minigzip64 = self.zlib_dir + "/minigzip64"
        env = os.environ.copy()
        env["CFLAGS"] = cflags

        logging.debug("CFLAGS: %s", env["CFLAGS"])

        # Either binary present means a previous build already succeeded.
        if os.path.isfile(minigzip32) or os.path.isfile(minigzip64):
            return True

        if not os.path.isdir(self.zlib_dir):
            prettify.error_message('Cannot compile zlib because "{}" could '
                                   "not be found.".format(self.zlib_dir))
            return False

        logging.info('Compiling zlib with %d Make threads, and "%s" CFLAGS.',
                     cores, cflags)

        cmd = "./configure && make -j " + str(cores)

        self.commands.append("Build: CFLAGS = " + cflags)
        self.commands.append("Build: " + cmd)

        execute.output(cmd, self.zlib_dir, environment=env)

        # Both test binaries must exist for the build to count as successful.
        return os.path.isfile(minigzip32) and os.path.isfile(minigzip64)
    def build(self, cores=None, cflags=None):
        """Compiles OpenMPI.

        Args:
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        if cores is None:
            cores = 1
        if cflags is None:
            cflags = "-march=native -mtune=native"
        if "-O" not in cflags:
            cflags += " -O3 "

        build_dir = self.mpi_dir + "/build"
        bin_loc = build_dir + "/bin/mpicc"
        shell_env = os.environ.copy()
        shell_env["CFLAGS"] = cflags

        # A previously built mpicc means there is nothing to do.
        if os.path.isfile(bin_loc):
            return True

        if not os.path.isdir(self.mpi_dir):
            prettify.error_message(
                'Cannot compile OpenMPI because "{}" could not be found.'.
                format(self.mpi_dir))
            return False

        logging.info(
            'Compiling OpenMPI using %d Make threads and "%s" CFLAGS.', cores,
            cflags)

        os.makedirs(build_dir, exist_ok=True)

        execute.output("../configure --prefix=" + build_dir,
                       build_dir,
                       environment=shell_env)
        execute.output("make -s -j {} all".format(cores),
                       build_dir,
                       environment=shell_env)

        # Fix: verify the compiler wrapper actually exists instead of
        # unconditionally reporting success; this matches the post-build
        # checks of every sibling build method (glibc, zlib, OpenBLAS).
        return os.path.isfile(bin_loc)
# Example #10
    def extract(self):
        """Extract YCSB and the MySQL J Connector.

        Returns:
            Boolean: True if extraction was successful otherwise False.
        """
        ycsb_archive = "{}/ycsb-{}.tar.gz".format(self.src_dir, self.version)
        jconnect_dir = "{}/mysql-connector-java-{}".format(
            self.src_dir, self.jconnect_ver)
        jconnect_archive = jconnect_dir + ".tar.gz"
        jconnect_final = self.src_dir + "/mysql-connector-java"

        if not os.path.isfile(ycsb_archive):
            prettify.error_message(
                'Cannot extract YCSB because "{}" could not be found.'.format(
                    ycsb_archive))
            return False

        if not os.path.isfile(jconnect_archive):
            prettify.error_message(
                'Cannot extract MySQL J Connector because "{}" could not be'
                " found.".format(jconnect_archive))
            return False

        # Each archive is unpacked into src_dir and renamed to a
        # version-independent directory; skip any piece already in place.
        if not os.path.isdir(self.ycsb_dir):
            logging.info("Extracting YCSB.")
            extract.tar(ycsb_archive, self.src_dir)
            os.rename("{}/ycsb-{}".format(self.src_dir, self.version),
                      self.ycsb_dir)
            logging.info("Extracting YCSB Complete.")

        if not os.path.isdir(jconnect_final):
            logging.info("Extracting MySQL J Connector.")
            extract.tar(jconnect_archive, self.src_dir)
            os.rename(jconnect_dir, jconnect_final)
            logging.info("Extracting MySQL J Connector Complete.")

        return os.path.isdir(self.ycsb_dir) and os.path.isdir(jconnect_final)
# Example #11
    def extract(self):
        """Extract zlib.

        Returns:
            Boolean: True if extraction was successful otherwise False.
        """
        file_path = "{}/zlib-{}.tar.gz".format(self.src_dir, self.version)

        if os.path.isdir(self.zlib_dir):
            logging.debug('"%s" exists, exiting early.', self.zlib_dir)
            return True

        if not os.path.isfile(file_path):
            prettify.error_message('Cannot extract zlib because "{}" could '
                                   "not be found.".format(file_path))
            return False

        logging.info("Extracting zlib.")

        extract.tar(file_path, self.src_dir)

        logging.debug('Renaming "%s-%s" to "%s".', self.zlib_dir, self.version,
                      self.zlib_dir)
        os.rename("{}-{}".format(self.zlib_dir, self.version), self.zlib_dir)

        # Fix: return the documented Boolean instead of falling off the end
        # and implicitly returning None (falsy) on a successful extraction.
        return os.path.isdir(self.zlib_dir)
    def extract(self):
        """Extract OpenMPI.

        Returns:
            Boolean: True if extraction was successful otherwise False.
        """
        archive = "{}/openmpi-{}.tar.gz".format(self.src_dir, self.version)

        # Already extracted by a previous run.
        if os.path.isdir(self.mpi_dir):
            return True

        if not os.path.isfile(archive):
            prettify.error_message(
                'Cannot extract OpenMPI because "{}" could not be found.'.
                format(archive))
            return False

        logging.info("Extracting OpenMPI.")
        extract.tar(archive, self.src_dir)
        # Drop the version suffix from the unpacked directory name.
        os.rename("{}-{}".format(self.mpi_dir, self.version), self.mpi_dir)

        return os.path.isdir(self.mpi_dir)
# Example #13
    def download(self, url=None):
        """Download MKL.

        Args:
            url (str, optional): The URL to download the MKL archive from.

        Returns:
            Boolean: True if download was successful otherwise False.
        """
        archive = "{}/l_mkl_{}.tgz".format(self.src_dir, self.version)

        # Archive already present: nothing to download.
        if os.path.isfile(archive):
            return True

        if url is None:
            prettify.error_message(
                "Unable to find an URL for MKL. Please visit "
                "https://software.intel.com/en-us/mkl to download MKL and "
                "place the archive in the {} directory.".format(self.src_dir))
            return False

        logging.info("Downloading MKL.")
        download.file(url, archive)

        return os.path.isfile(archive)
# Example #14
    def install(self):
        """Install MKL.

        Returns:
            Boolean: True if installation was successful otherwise False.
        """

        # The installer's target directory; if present, MKL is installed.
        if os.path.isdir("/opt/intel/mkl"):
            return True

        if not os.path.isdir(self.mkl_dir):
            prettify.error_message(
                'Cannot install MKL because "{}" could not be found.'.format(
                    self.mkl_dir))
            return False

        logging.info("Installing MKL.")

        execute.output('{}/install.sh --silent "{}/provided/silent.cfg"'.format(
            self.mkl_dir, self.src_dir))

        # Fix: verify the installation target, not the extracted source
        # directory. self.mkl_dir already existed before the installer ran
        # (guarded above), so the old check always reported success.
        if os.path.isdir("/opt/intel/mkl"):
            return True
        return False
    def build(self, threads, arch=None, cores=None, cflags=None, avx512=None):
        """Compiles High-Performance Linpack (HPL).

        Args:
            threads (int): The number of threads on the system.
            arch (str, optional): The architecture type of the system.
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.
            avx512 (bool, optional): If AVX-512 instructions should be added to
                                     the CFLAGS.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        arch = "x86_64" if arch is None else arch
        cores = 1 if cores is None else cores
        cflags = "-march=native -mtune=native" if cflags is None else cflags
        if avx512 is True:
            cflags += (" -mavx512f -mavx512cd -mavx512bw -mavx512dq -mavx512vl"
                       " -mavx512ifma -mavx512vbmi ")
        if "-O" not in cflags:
            cflags += " -O3 "

        shell_env = os.environ.copy()
        shell_env["CFLAGS"] = cflags
        shell_env["OMP_NUM_THREADS"] = str(threads)
        xhpl_bin = "{}/bin/{}/xhpl".format(self.hpl_dir, arch)
        mpicc_bin = self.src_dir + "/openmpi/build/bin/mpicc"
        makefile = self.hpl_dir + "/Make." + arch

        # Binary already built: nothing to do.
        if os.path.isfile(xhpl_bin):
            return True

        def report_missing(path):
            # Shared error path for any missing build prerequisite.
            text = ('Cannot compile LINPACK because "{}" could not '
                    "be found.".format(path))
            prettify.error_message(text)
            logging.error(text)

        if not os.path.isdir(self.hpl_dir):
            report_missing(self.hpl_dir)
            return False

        if not os.path.isfile(makefile):
            report_missing(makefile)
            return False

        if not os.path.isfile(mpicc_bin):
            report_missing(mpicc_bin)
            return False

        logging.info(
            'Compiling LINPACK using %s OMP threads, "%s" arch,'
            ' %d Make threads, and "%s" CFLAGS.',
            shell_env["OMP_NUM_THREADS"],
            arch,
            cores,
            shell_env["CFLAGS"],
        )

        # Sometimes building has an issue on the first run and doesn't build
        # but the second go-round is perfectly fine
        build_cmd = ("make -s -j {0} all arch={1} || make -s -j {0} all "
                     "arch={1}".format(cores, arch))
        install_cmd = "make -s -j {} install arch={}".format(cores, arch)

        self.commands.append("Build: CFLAGS = " + cflags)
        self.commands.append("Build: OMP_NUM_THREADS = " + str(threads))
        self.commands.append("Build: " + build_cmd)
        self.commands.append("Install: " + install_cmd)

        execute.output(build_cmd,
                       working_dir=self.hpl_dir,
                       environment=shell_env)

        execute.output(install_cmd,
                       working_dir=self.hpl_dir,
                       environment=shell_env)

        return os.path.isfile(xhpl_bin)
# Example #16
    def run(self, cores=None, cflags=None):
        """Run three timed Linux kernel compilations.

        Args:
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            If success, a dict containing (unit, run1, run2, run3, average,
            median).

                unit (str): Score units.
                run1 (float): Score for the first run.
                run2 (float): Score for the second run.
                run3 (float): Score for the third run.
                average (float): Average of run1, run2, and run3.
                median (float): Median of run1, run2, and run3.

            Else, a dict containing (error).

                error (str): Error message.
        """
        if cores is None:
            cores = 1
        if cflags is None:
            cflags = "-march=native -mtune=native"
        if "-O" not in cflags:
            cflags += " -O3 "
        shell_env = os.environ.copy()
        shell_env["CFLAGS"] = cflags

        results = {"unit": "s"}
        config_loc = self.kernel_dir + "/.config"
        tmp_results = []

        if not os.path.isfile(config_loc):
            text = ('Cannot run timed Linux kernel because "{}" could not '
                    "be found.".format(config_loc))
            prettify.error_message(text)
            return {"error": text}

        logging.info(
            "Running timed Linux kernel compilation using %d Make "
            "thread.", cores)

        os.makedirs(self.results_dir, exist_ok=True)

        clean_cmd = "make -s -j {} clean".format(cores)
        build_cmd = "make -s -j {}".format(cores)
        self.commands.append("Run: CFLAGS = " + cflags)
        self.commands.append("Prerun: " + clean_cmd)
        self.commands.append("Run: " + build_cmd)

        for count in range(1, 4):
            run_num = "run" + str(count)
            # Fix: results were previously written to "zlib_runN.txt", a
            # copy-paste slip from the zlib benchmark.
            result_file = "{}/linux_kernel_{}.txt".format(
                self.results_dir, run_num)

            # Clean so every run compiles from scratch.
            execute.output(clean_cmd, self.kernel_dir, environment=shell_env)

            optimize.prerun()
            time.sleep(10)

            compile_speed = execute.timed(build_cmd,
                                          working_dir=self.kernel_dir,
                                          environment=shell_env)

            # vmlinux is the proof the kernel actually linked.
            if (not os.path.isfile(self.kernel_dir + "/vmlinux")
                    or compile_speed is None):
                return {"error": "Linux Kernel failed to compile."}

            file.write(
                result_file,
                "{}\nLinux Kernel Compilation Speed:  {}\n".format(
                    build_cmd, compile_speed),
            )

            results[run_num] = float(compile_speed)
            tmp_results.append(compile_speed)

        if tmp_results:
            results["average"] = statistics.mean(tmp_results)
            results["median"] = statistics.median(tmp_results)
            results["variance"] = statistics.variance(tmp_results)
            sorted_results = sorted(tmp_results)
            results["range"] = sorted_results[-1] - sorted_results[0]

        logging.info("Timed Linux kernel compilation results:\n%s",
                     str(results))

        return results
# Example #17
    def run(self, l1_cache, l2_cache, l3_cache, arch=None, threads=None):
        """Run LMbench memory-latency (lat_mem_rd) measurement.

        Args:
            l1_cache (int): The L1-Cache size in B for the system.
            l2_cache (int): The L2-Cache size in B for the system.
            l3_cache (int): The L3-Cache size in B for the system.
            arch (str, optional): The architecture type of the system.
            threads (int, optional): The number of threads on the system.

        Returns:
            If success, a dict containing (unit, level1, level2, level3).

                unit (str): Latency units.
                level1 (float): Latency for L1-Cache.
                level2 (float): Latency for L2-Cache.
                level3 (float): Latency for L3-Cache.

            Else, a dict containing (error).

                error (str): Error message.
        """
        if arch is None:
            arch = "x86_64"
        if threads is None:
            threads = 1

        thread = 0
        stride = 1024
        max_cache = 512
        bin_loc = "{}/bin/{}-linux-gnu/lat_mem_rd".format(
            self.lmbench_dir, arch)
        results = {"unit": "ns"}

        if not os.path.isfile(bin_loc):
            text = 'Could not find LMbench binaries at "{}".'.format(bin_loc)
            prettify.error_message(text)
            logging.error(text)
            return {"error": text}

        logging.info(
            "Running LMbench using %d L1-Cache, %d L2-Cache, "
            '%d L3-Cache, and "%s" arch.',
            l1_cache,
            l2_cache,
            l3_cache,
            arch,
        )

        os.makedirs(self.results_dir, exist_ok=True)

        # Pin to core 2 instead of core 0 when at least 3 threads exist.
        if threads >= 3:
            thread = 2

        run_command = "taskset -c {} {} {} {}".format(thread, bin_loc,
                                                      max_cache, stride)

        self.commands.append("Run: " + run_command)

        result_file = self.results_dir + "/lmbench_output.txt"
        output = execute.output(run_command)
        file.write(result_file, output)

        # Fix: initialize all latencies so a falsy l2_cache/l3_cache no
        # longer causes a NameError when the values are tested below.
        l2_latency = None
        l3_latency = None

        l1_latency = self.__closest_cache_latency(
            float(l1_cache) / 1024.0 / 1024.0, output)

        if l2_cache:
            l2_latency = self.__closest_cache_latency(
                float(l2_cache) / 1024.0 / 1024.0, output)

        if l3_cache:
            l3_latency = self.__closest_cache_latency(
                float(l3_cache) / 1024.0 / 1024.0, output)

        if l1_latency:
            results["level1"] = float(l1_latency)
        if l2_latency:
            results["level2"] = float(l2_latency)
        if l3_latency:
            results["level3"] = float(l3_latency)

        logging.info("LMbench results: %s", str(results))
        return results
# Example #18
    def run(self, threads):
        """Run YCSB with MySQL three times.

        Args:
            threads (int): The number of threads on the system.

        Returns:
            If success: A dict keyed by "unit", "run1".."run3" (each a dict of
            throughput, read_latency, update_latency), plus "average",
            "median", "variance", and "range" aggregate dicts.

            If error: A dict containing (error).
                error (str): Error message.
        """
        shell_env = os.environ.copy()
        maven_dir = self.src_dir + "/maven"
        error = False
        results = {"unit": {"throughput": "ops/sec", "latency": "us"}}

        # YCSB is run with Maven on the environment (M2_HOME/M2); bail out
        # early if the local Maven checkout is missing.
        if os.path.isdir(maven_dir):
            shell_env["M2_HOME"] = maven_dir
            shell_env["M2"] = maven_dir + "/bin"
        else:
            return {"error": "Maven not found."}

        mysql_dir = self.src_dir + "/mysql"
        mysql_data = mysql_dir + "/mysql-files"

        # The "ycsb" table directory must have been created by a prior setup.
        if not os.path.exists(mysql_data + "/ycsb"):
            text = 'Unable to find "ycsb" table in MySQL.'
            prettify.error_message(text)
            return {"error": text}

        os.makedirs(self.results_dir, exist_ok=True)

        # Start MySQL service
        subprocess.Popen(
            "{0}/bin/mysqld_safe --user=root --basedir={0} --datadir={1} "
            "--plugin-dir={0}/lib/plugin --pid-file=/tmp/mysql.pid "
            "--log-error=ycsb.err &".format(mysql_dir, mysql_data),
            cwd=mysql_dir,
            shell=True,
            env=shell_env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        # Give the MySQL server time to come up before running YCSB.
        time.sleep(20)

        read_latency_results = []
        update_latency_results = []
        throughput_results = []

        run_cmd = ("./bin/ycsb run jdbc -s -P workloads/workloada -p "
                   "db.driver=com.mysql.jdbc.Driver -p "
                   "db.url=jdbc:mysql://localhost:3306/ycsb?useSSL=false -p "
                   'db.user=root -p db.passwd="" -threads {} -p '
                   "operationcount=1000000".format(threads))

        self.commands.append("Run: " + run_cmd)

        for count in range(1, 4):
            run_num = "run" + str(count)
            result_file = "{}/ycsb-sql_{}.txt".format(self.results_dir, run_num)

            optimize.prerun()
            time.sleep(10)

            # Run YCSB
            output = execute.output(run_cmd,
                                    working_dir=self.ycsb_dir,
                                    environment=shell_env)

            file.write(result_file, output)

            # Abort the remaining runs on any failed read/update operation.
            if "UPDATE-FAILED" in output or "READ-FAILED" in output:
                error = True
                break

            # Overall throughput (ops/sec) from the "[OVERALL]" summary line.
            throughput_line = grep.text(output,
                                        r"\[OVERALL\], Throughput\(ops/sec\),")
            if throughput_line:
                throughput = float(throughput_line[-1].split(",")[2].strip())
                throughput_results.append(throughput)

            # 95th-percentile read latency (us).
            readlat_line = grep.text(output,
                                     r"\[READ\], 95thPercentileLatency\(us\),")
            if readlat_line:
                readlat = float(readlat_line[-1].split(",")[2].strip())
                read_latency_results.append(readlat)

            # 95th-percentile update latency (us).
            updatelat_line = grep.text(
                output, r"\[UPDATE\], 95thPercentileLatency\(us\),")
            if updatelat_line:
                updatelat = float(updatelat_line[-1].split(",")[2].strip())
                update_latency_results.append(updatelat)

            # Record the run only when all three metrics were parsed.
            if throughput_line and readlat_line and updatelat_line:
                results[run_num] = {
                    "throughput": throughput,
                    "read_latency": readlat,
                    "update_latency": updatelat,
                }

        # Stop MySQL service
        if os.path.exists("/tmp/mysql.pid"):
            pid = file.read("/tmp/mysql.pid").strip()
            # NOTE(review): kill is issued three times -- presumably because
            # mysqld_safe restarts the server; confirm the intended behavior.
            execute.kill(pid)
            execute.kill(pid)
            execute.kill(pid)

        if error:
            return {"error": "YCSB failed to update and/or read database."}

        # Aggregate statistics are only meaningful if at least run1 parsed.
        if "run1" in results:
            results["average"] = {}
            results["median"] = {}
            results["variance"] = {}
            results["range"] = {}

            results["average"]["throughput"] = statistics.mean(
                throughput_results)
            results["median"]["throughput"] = statistics.median(
                throughput_results)
            results["variance"]["throughput"] = statistics.variance(
                throughput_results)
            sorted_throughput = sorted(throughput_results)
            results["range"]["throughput"] = (sorted_throughput[-1] -
                                              sorted_throughput[0])

            results["average"]["read_latency"] = statistics.mean(
                read_latency_results)
            results["median"]["read_latency"] = statistics.median(
                read_latency_results)
            results["variance"]["read_latency"] = statistics.variance(
                read_latency_results)
            sorted_read_latency = sorted(read_latency_results)
            results["range"]["read_latency"] = (sorted_read_latency[-1] -
                                                sorted_read_latency[0])

            results["average"]["update_latency"] = statistics.mean(
                update_latency_results)
            results["median"]["update_latency"] = statistics.median(
                update_latency_results)
            results["variance"]["update_latency"] = statistics.variance(
                update_latency_results)
            sorted_update_latency = sorted(update_latency_results)
            results["range"]["update_latency"] = (sorted_update_latency[-1] -
                                                  sorted_update_latency[0])

        logging.info("YCSB MySQL results: %s", str(results))

        return results
# Example #19
    def setup(self, threads):
        """Setup Cassandra's "ycsb" table and load basic records.

        Args:
            threads (int): The number of threads on the system.

        Returns:
            Boolean: True if setup was successful otherwise False.
        """
        pid = None
        shell_env = os.environ.copy()
        maven_dir = self.src_dir + "/maven"

        # Maven is required by YCSB's Cassandra binding.
        if os.path.isdir(maven_dir):
            shell_env["M2_HOME"] = maven_dir
            shell_env["M2"] = maven_dir + "/bin"
        else:
            return False

        cassandra_dir = self.src_dir + "/cassandra"

        if not os.path.isdir(cassandra_dir):
            prettify.error_message(
                'Cannot start Cassandra because "{}" could not be found.'.
                format(cassandra_dir))
            return False

        # A previous setup already created the table; nothing to do.
        if os.path.exists(self.src_dir + "/cassandra/data/data/ycsb"):
            logging.debug('Skipping Cassandra setup because the "ycsb" table '
                          "already exists.")
            return True

        # Start Cassandra service in the background; output is discarded
        # because Cassandra writes its own log files.
        subprocess.Popen(
            "./bin/cassandra -R -p /tmp/cassandra.pid &",
            shell=True,
            cwd=cassandra_dir,
            env=shell_env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        time.sleep(20)  # give the service time to come up

        if os.path.isfile("/tmp/cassandra.pid"):
            pid = file.read("/tmp/cassandra.pid").strip()

        # Verify the process is actually alive by checking /proc/<pid>.
        # Bug fix: the previous os.path.dirname("/proc/" + pid) only
        # manipulated the path string and was always truthy, so a dead
        # Cassandra process was never detected.
        if not pid or not os.path.isdir("/proc/" + pid):
            text = "Cassandra failed to start."
            prettify.error_message(text)
            return False

        # Setup table schema.
        execute.output(
            "./bin/cqlsh -f {}/provided/create-table.cql".format(self.src_dir),
            working_dir=cassandra_dir,
            environment=shell_env,
        )

        load_cmd = ("./bin/ycsb load cassandra-cql -s -P workloads/workloada "
                    '-p hosts="localhost" -threads {} -p recordcount=10000000'.
                    format(threads))

        self.commands.append("Load: " + load_cmd)

        # Load YCSB records.
        execute.output(load_cmd,
                       working_dir=self.ycsb_dir,
                       environment=shell_env)

        # Stop Cassandra service; kill is repeated to ensure shutdown.
        if pid:
            execute.kill(pid)
            execute.kill(pid)
            execute.kill(pid)

        return True
# Example #20
    def build(self, arch=None, cores=None, cflags=None):
        """Compiles LMbench.

        Args:
            arch (str, optional): The architecture type of the system.
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        arch = "x86_64" if arch is None else arch
        cores = 1 if cores is None else cores
        cflags = "-march=native -mtune=native" if cflags is None else cflags
        if "-O" not in cflags:
            cflags += " -O3 "

        target_bin = "{}/bin/{}-linux-gnu/lat_mem_rd".format(
            self.lmbench_dir, arch)
        build_env = os.environ.copy()
        build_env["CFLAGS"] = cflags
        sccs_dir = self.lmbench_dir + "/SCCS"
        sccs_file = sccs_dir + "/s.ChangeSet"

        # Nothing to do if the binary already exists.
        if os.path.isfile(target_bin):
            return True

        if not os.path.isdir(self.lmbench_dir):
            text = ('Cannot compile LMbench because "{}" could not be '
                    "found.".format(self.lmbench_dir))
            prettify.error_message(text)
            logging.error(text)
            return False

        logging.info(
            'Compiling LMbench using "%s" arch, %d Make threads,'
            ' and "%s" CFLAGS.',
            arch,
            cores,
            build_env["CFLAGS"],
        )

        # The build emits errors when this SCCS file is not present, so
        # create an empty placeholder first.
        if not os.path.exists(sccs_file):
            os.makedirs(sccs_dir, exist_ok=True)
            file.touch(sccs_file)

        make_cmd = "make -s -j {}".format(cores)

        self.commands.append("Build: CFLAGS = " + cflags)
        self.commands.append("Build: " + make_cmd)

        execute.output(make_cmd,
                       working_dir=self.lmbench_dir,
                       environment=build_env)

        return os.path.isfile(target_bin)
# Example #21
    def setup(self, threads):
        """Setup YCSB J Connector, MySQL's "ycsb" table, and load basic records.

        Args:
            threads (int): The number of threads on the system.

        Returns:
            Boolean: True if setup was successful otherwise False.
        """
        shell_env = os.environ.copy()
        maven_dir = self.src_dir + "/maven"

        # Maven is required by YCSB's JDBC binding.
        if os.path.isdir(maven_dir):
            shell_env["M2_HOME"] = maven_dir
            shell_env["M2"] = maven_dir + "/bin"
        else:
            prettify.error_message("Maven could not be found.")
            return False

        mysql_dir = self.src_dir + "/mysql"
        mysql_data = mysql_dir + "/mysql-files"

        jconnect_jar = "mysql-connector-java-{}-bin.jar".format(
            self.jconnect_ver)
        jconnect_path = "{}/mysql-connector-java/{}".format(
            self.src_dir, jconnect_jar)
        jdbc_binding_path = "{}/jdbc-binding/lib/{}".format(
            self.ycsb_dir, jconnect_jar)

        if not os.path.isdir(mysql_dir):
            prettify.error_message(
                'Cannot start MySQL because "{}" could not be found.'.format(
                    mysql_dir))
            return False

        # A previous setup already created the table; nothing to do.
        if os.path.exists(mysql_data + "/ycsb"):
            logging.debug('Skipping MySQL setup because the "ycsb" table '
                          "already exists.")
            return True

        # Start MySQL service in the background.
        subprocess.Popen(
            "{0}/bin/mysqld_safe --user=root --basedir={0} --datadir={1} "
            "--plugin-dir={0}/lib/plugin --pid-file=/tmp/mysql.pid "
            "--log-error=ycsb.err &".format(mysql_dir, mysql_data),
            cwd=mysql_dir,
            shell=True,
            env=shell_env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        time.sleep(20)  # give the service time to come up

        # Setup ycsb database schema.
        schema_output = execute.output(
            "./bin/mysql -uroot --skip-password < "
            "{}/provided/create-table.mysql".format(self.src_dir),
            working_dir=mysql_dir,
            environment=shell_env,
        )

        logging.debug(schema_output)
        if os.path.isfile(mysql_data + "/ycsb.err"):
            logging.debug(file.read(mysql_data + "/ycsb.err"))

        # Make the J Connector jar visible to YCSB's JDBC binding.
        shutil.copyfile(jconnect_path, jdbc_binding_path)

        # Load YCSB records.
        load_cmd = ("./bin/ycsb load jdbc -s -P workloads/workloada -p "
                    "db.driver=com.mysql.jdbc.Driver -p "
                    "db.url=jdbc:mysql://localhost:3306/ycsb?useSSL=false -p "
                    'db.user=root -p db.passwd="" -threads {} '
                    "-p recordcount=1000000".format(threads))

        self.commands.append("Load: " + load_cmd)

        # Execute the exact command recorded above instead of rebuilding
        # a duplicate string (the previous copy could silently drift).
        load_ycsb = execute.output(load_cmd,
                                   working_dir=self.ycsb_dir,
                                   environment=shell_env)

        logging.debug(load_ycsb)
        if os.path.isfile(mysql_data + "/ycsb.err"):
            logging.debug(file.read(mysql_data + "/ycsb.err"))

        # Stop MySQL service; kill is repeated to ensure both the
        # mysqld_safe wrapper and its children shut down.
        if os.path.exists("/tmp/mysql.pid"):
            pid = file.read("/tmp/mysql.pid").strip()
            execute.kill(pid)
            execute.kill(pid)
            execute.kill(pid)

        # Report success explicitly, matching the Cassandra setup().
        return True
    def run(self, cores=None, cflags=None):
        """Runs Docker containers to compile the Linux kernel.

        Args:
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            If success, a dict containing (unit, times, average, median,
                variance, range).

                unit (str): Score units.
                times (list): All compile times for the kernel.
                average (float): Average of the times.
                median (float): Median of the times.
                variance (float): Variance of the times.
                range (float): Range of the times.

            Else, a dict containing (error).

                error (str): Error message.
        """
        if cores is None:
            cores = 1
        if cflags is None:
            cflags = "-march=native -mtune=native"
        if "-O" not in cflags:
            cflags += " -O3 "
        shell_env = os.environ.copy()
        shell_env["CFLAGS"] = cflags
        # Prepend the bundled Docker binaries to PATH.
        shell_env["PATH"] = self.docker_dir + ":" + shell_env["PATH"]

        pid_file = "/tmp/docker.pid"
        build_name = "compile_kernel"
        result_file = self.results_dir + "/times.txt"
        results = {"unit": "s"}
        times = []
        procs = []

        os.makedirs(self.results_dir, exist_ok=True)
        shutil.copyfile(self.docker_dir + "/Dockerfile",
                        self.results_dir + "/Dockerfile")

        if not os.path.isfile(self.docker_dir + "/dockerd"):
            message = "Cannot build. Docker directory not found."
            prettify.error_message(message)
            return {"error": message}

        # Start Docker daemon in the background.
        subprocess.Popen(
            "{}/dockerd --pidfile {} --data-root {} &".format(
                self.docker_dir, pid_file, self.data_dir),
            cwd=self.docker_dir,
            shell=True,
            env=shell_env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        logging.info("Docker daemon is running.")
        time.sleep(20)  # give the daemon time to come up

        if not self.__image_built(build_name, env=shell_env):
            # Shut the daemon down before bailing out.
            if os.path.exists(pid_file):
                pid = file.read(pid_file).strip()
                execute.kill(pid)
            message = "Cannot build. Docker image not found."
            prettify.error_message(message)
            return {"error": message}

        logging.info("Docker is about to run.")

        # Remove all previously ran containers.
        try:
            containers = execute.output(
                "{}/docker ps -a -q".format(self.docker_dir),
                working_dir=self.docker_dir,
                environment=shell_env,
            )
            if containers:
                execute.output(
                    "{0}/docker rm $({0}/docker ps -a -q)".format(
                        self.docker_dir),
                    working_dir=self.docker_dir,
                    environment=shell_env,
                )
        except subprocess.SubprocessError as err:
            logging.debug(err)

        optimize.prerun()
        time.sleep(10)

        # Launch all containers first, then collect their results below.
        for count in range(0, 100):
            test_name = build_name + "_test{}".format(count)
            # Note: We avoid using `-i -t` because it causes TTY issues
            #       with SSH connections.
            run_command = ("{}/docker run --ulimit nofile=1048576:1048576 "
                           '-e "cores={}" -e "cflags={}" --name {} {}'.format(
                               self.docker_dir, cores, cflags, test_name,
                               build_name))
            if count == 0:
                self.commands.append("Run: " + run_command)

            proc = subprocess.Popen(
                run_command,
                shell=True,
                cwd=self.docker_dir,
                env=shell_env,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                universal_newlines=True,
            )
            procs.append(proc)

        # Each successful container prints only its compile time.
        for proc in procs:
            stdout = proc.communicate()[0]
            if isinstance(stdout, bytes):
                stdout = stdout.decode()
            stdout = stdout.strip()
            try:
                stdout = float(stdout)
                file.write(result_file, "{}\n".format(stdout), append=True)
                times.append(stdout)
            except ValueError:
                logging.debug("Container failed to finish.")
                logging.debug(stdout)

        # Remove all previously ran containers.
        try:
            containers = execute.output(
                "{}/docker ps -a -q".format(self.docker_dir),
                working_dir=self.docker_dir,
                environment=shell_env,
            )
            if containers:
                execute.output(
                    "{0}/docker stop $({0}/docker ps -a -q)".format(
                        self.docker_dir),
                    working_dir=self.docker_dir,
                    environment=shell_env,
                )
                execute.output(
                    "{0}/docker rm $({0}/docker ps -a -q)".format(
                        self.docker_dir),
                    working_dir=self.docker_dir,
                    environment=shell_env,
                )
        except subprocess.SubprocessError as err:
            logging.debug(err)

        # Stop Docker daemon.
        if os.path.exists(pid_file):
            logging.info("Docker daemon is turning off.")
            pid = file.read(pid_file).strip()
            execute.kill(pid)
            execute.kill(pid)
            time.sleep(5)

        if times:
            results["times"] = times
            results["median"] = statistics.median(times)
            results["average"] = statistics.mean(times)
            # statistics.variance() requires at least two samples; with a
            # single finished container it would raise StatisticsError.
            results["variance"] = (statistics.variance(times)
                                   if len(times) > 1 else 0.0)
            sorted_times = sorted(times)
            results["range"] = sorted_times[-1] - sorted_times[0]
        else:
            results["error"] = "No container times available."

        return results
# Example #23
    def run(self, threads):
        """Run YCSB with Cassandra three times.

        Args:
            threads (int): The number of threads on the system.

        Returns:
            If success, a dict containing (unit, run1, run2, run3, average,
            median, variance, range), each stats key holding per-metric
            (throughput, read_latency, update_latency) values.

            Else, a dict containing (error).

                error (str): Error message.
        """
        pid = None
        shell_env = os.environ.copy()
        maven_dir = self.src_dir + "/maven"
        error = False
        results = {"unit": {"throughput": "ops/sec", "latency": "us"}}

        # Maven is required by YCSB's Cassandra binding.
        if os.path.isdir(maven_dir):
            shell_env["M2_HOME"] = maven_dir
            shell_env["M2"] = maven_dir + "/bin"
        else:
            prettify.error_message("Maven could not be found.")
            return False

        cassandra_dir = self.src_dir + "/cassandra"

        # setup() must have been run first to create the table.
        if not os.path.exists(cassandra_dir + "/data/data/ycsb"):
            text = 'Unable to find "ycsb" table in Cassandra.'
            prettify.error_message(text)
            return {"error": text}

        read_latency_results = []
        update_latency_results = []
        throughput_results = []

        os.makedirs(self.results_dir, exist_ok=True)

        # Start Cassandra service in the background.
        subprocess.Popen(
            "./bin/cassandra -R -p /tmp/cassandra.pid &",
            shell=True,
            cwd=cassandra_dir,
            env=shell_env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        time.sleep(20)  # give the service time to come up

        if os.path.isfile("/tmp/cassandra.pid"):
            pid = file.read("/tmp/cassandra.pid").strip()

        # Verify the process is actually alive by checking /proc/<pid>.
        # Bug fix: os.path.dirname("/proc/" + pid) only manipulated the
        # path string and was always truthy, so a dead Cassandra process
        # was never detected.
        if not pid or not os.path.isdir("/proc/" + pid):
            text = "Cassandra failed to start."
            prettify.error_message(text)
            return {"error": text}

        run_cmd = ("./bin/ycsb run cassandra-cql -s -P workloads/workloada "
                   '-p hosts="localhost" -threads {} '
                   "-p operationcount=10000000".format(threads))

        self.commands.append("Run: " + run_cmd)

        for count in range(1, 4):
            run_num = "run" + str(count)
            result_file = "{}/ycsb-nosql_{}.txt".format(self.results_dir,
                                                        run_num)

            optimize.prerun()
            time.sleep(10)

            output = execute.output(run_cmd,
                                    working_dir=self.ycsb_dir,
                                    environment=shell_env)

            file.write(result_file, output)

            if "UPDATE-FAILED" in output or "READ-FAILED" in output:
                error = True
                break

            # Each YCSB metric line is CSV-like; the score is the third
            # comma-separated field of the last matching line.
            throughput_line = grep.text(output,
                                        r"\[OVERALL\], Throughput\(ops/sec\),")
            if throughput_line:
                throughput = float(throughput_line[-1].split(",")[2].strip())
                throughput_results.append(throughput)

            readlat_line = grep.text(output,
                                     r"\[READ\], 95thPercentileLatency\(us\),")
            if readlat_line:
                readlat = float(readlat_line[-1].split(",")[2].strip())
                read_latency_results.append(readlat)

            updatelat_line = grep.text(
                output, r"\[UPDATE\], 95thPercentileLatency\(us\),")
            if updatelat_line:
                updatelat = float(updatelat_line[-1].split(",")[2].strip())
                update_latency_results.append(updatelat)

            if throughput_line and readlat_line and updatelat_line:
                results[run_num] = {
                    "throughput": throughput,
                    "read_latency": readlat,
                    "update_latency": updatelat,
                }

        # Stop Cassandra service; kill is repeated to ensure shutdown.
        if pid:
            execute.kill(pid)
            execute.kill(pid)
            execute.kill(pid)

        if error:
            return {"error": "YCSB failed to update and/or read database."}

        if "run1" in results:
            results["average"] = {}
            results["median"] = {}
            results["variance"] = {}
            results["range"] = {}

            # NOTE(review): statistics.variance() needs >= 2 samples; this
            # assumes at least two of the three runs parsed successfully.
            results["average"]["throughput"] = statistics.mean(
                throughput_results)
            results["median"]["throughput"] = statistics.median(
                throughput_results)
            results["variance"]["throughput"] = statistics.variance(
                throughput_results)
            sorted_throughput = sorted(throughput_results)
            results["range"]["throughput"] = (sorted_throughput[-1] -
                                              sorted_throughput[0])

            results["average"]["read_latency"] = statistics.mean(
                read_latency_results)
            results["median"]["read_latency"] = statistics.median(
                read_latency_results)
            results["variance"]["read_latency"] = statistics.variance(
                read_latency_results)
            sorted_read_latency = sorted(read_latency_results)
            results["range"]["read_latency"] = (sorted_read_latency[-1] -
                                                sorted_read_latency[0])

            results["average"]["update_latency"] = statistics.mean(
                update_latency_results)
            results["median"]["update_latency"] = statistics.median(
                update_latency_results)
            results["variance"]["update_latency"] = statistics.variance(
                update_latency_results)
            sorted_update_latency = sorted(update_latency_results)
            results["range"]["update_latency"] = (sorted_update_latency[-1] -
                                                  sorted_update_latency[0])

        logging.info("YCSB Cassandra results: %s", str(results))

        return results
    def run(self, threads):
        """Run OpenSSL speed tests three times per cipher.

        Args:
            threads (int): The total number of threads on the system.

        Returns:
            A dict keyed by cipher name ("aes-128-gcm" and "aes-256-gcm").
            Each entry contains (unit, score_size, score_size_unit,
            test_bit_size, test) plus, per successful run, run1/run2/run3
            dicts of (encrypt, decrypt) scores, and (average, median,
            variance, range) dicts of (encrypt, decrypt) aggregates.

            Else, a dict containing (error).

                error (str): Error message.
        """
        taskset_ids = self.__taskset_ids(threads)
        multi_num = self.__multi_num(threads, taskset_ids)
        bin_loc = self.openssl_dir + "/apps/openssl"
        results = {
            "aes-128-gcm": {
                "unit": "B/s",
                "score_size": 8192,
                "score_size_unit": "B",
                "test_bit_size": 128,
                "test": "AES-GCM",
            },
            "aes-256-gcm": {
                "unit": "B/s",
                "score_size": 8192,
                "score_size_unit": "B",
                "test_bit_size": 256,
                "test": "AES-GCM",
            },
        }

        # The locally-built libcrypto/libssl must be found before any
        # system-wide copies.
        shell_env = os.environ.copy()
        if "LD_LIBRARY_PATH" in shell_env:
            shell_env["LD_LIBRARY_PATH"] = "{}:{}".format(
                shell_env["LD_LIBRARY_PATH"], self.openssl_dir)
        else:
            shell_env["LD_LIBRARY_PATH"] = self.openssl_dir

        if not os.path.isfile(bin_loc):
            text = 'Could not find OpenSSL binaries at "{}".'.format(bin_loc)
            prettify.error_message(text)
            return {"error": text}

        logging.info(
            "Running OpenSSL on ids %s using a total of %d threads.",
            taskset_ids,
            multi_num,
        )

        os.makedirs(self.results_dir, exist_ok=True)

        for test in results:
            encrypt_results = []
            decrypt_results = []

            cmd_base = "taskset -c {} {} speed -multi {} -evp {}".format(
                taskset_ids, bin_loc, multi_num, test)
            cmd_decrypt = cmd_base + " -decrypt"

            self.commands.append("Run: " + cmd_base)
            self.commands.append("Run: " + cmd_decrypt)

            for count in range(1, 4):
                run_num = "run" + str(count)

                encrypt_result_file = "{}/openssl_{}_encrypt_{}.txt".format(
                    self.results_dir, test, run_num)
                decrypt_result_file = "{}/openssl_{}_decrypt_{}.txt".format(
                    self.results_dir, test, run_num)

                logging.debug("Encrypt command: %s", cmd_base)
                logging.debug("LD_LIBRARY_PATH: %s",
                              shell_env["LD_LIBRARY_PATH"])

                optimize.prerun()
                time.sleep(10)

                encrypt_output = execute.output(cmd_base, environment=shell_env)
                file.write(encrypt_result_file, encrypt_output)

                # Bug fix: log the decrypt command, not the encrypt one.
                logging.debug("Decrypt command: %s", cmd_decrypt)
                logging.debug("LD_LIBRARY_PATH: %s",
                              shell_env["LD_LIBRARY_PATH"])

                optimize.prerun()
                time.sleep(10)

                decrypt_output = execute.output(cmd_decrypt,
                                                environment=shell_env)
                file.write(decrypt_result_file, decrypt_output)

                encrypt_scores = encrypt_output.rstrip().split("\n")
                decrypt_scores = decrypt_output.rstrip().split("\n")

                if not encrypt_scores:
                    continue
                if not decrypt_scores:
                    continue
                # The last line of `openssl speed` output holds the scores;
                # the 7th column corresponds to the 8192-byte block size.
                encrypt_score = encrypt_scores[-1].split()[6]
                decrypt_score = decrypt_scores[-1].split()[6]

                # Strip the "k" unit suffix (e.g. "123456.78k").
                if "k" in encrypt_score:
                    encrypt_score = re.sub(r"[^0-9.]", "", encrypt_score)
                if "k" in decrypt_score:
                    decrypt_score = re.sub(r"[^0-9.]", "", decrypt_score)

                # The 'numbers' are in 1000s of bytes per second processed.
                encrypt_score = float(encrypt_score) * 1000.0
                decrypt_score = float(decrypt_score) * 1000.0

                encrypt_results.append(encrypt_score)
                decrypt_results.append(decrypt_score)

                results[test][run_num] = {}
                results[test][run_num]["encrypt"] = encrypt_score
                results[test][run_num]["decrypt"] = decrypt_score

            if encrypt_results and decrypt_results:
                results[test]["average"] = {}
                results[test]["average"]["encrypt"] = statistics.mean(
                    encrypt_results)
                results[test]["average"]["decrypt"] = statistics.mean(
                    decrypt_results)
                results[test]["median"] = {}
                results[test]["median"]["encrypt"] = statistics.median(
                    encrypt_results)
                results[test]["median"]["decrypt"] = statistics.median(
                    decrypt_results)
                # NOTE(review): statistics.variance() needs >= 2 samples;
                # this assumes at least two of the three runs succeeded.
                results[test]["variance"] = {}
                results[test]["variance"]["encrypt"] = statistics.variance(
                    encrypt_results)
                results[test]["variance"]["decrypt"] = statistics.variance(
                    decrypt_results)
                results[test]["range"] = {}
                sorted_encrypt = sorted(encrypt_results)
                results[test]["range"]["encrypt"] = (sorted_encrypt[-1] -
                                                     sorted_encrypt[0])
                sorted_decrypt = sorted(decrypt_results)
                results[test]["range"]["decrypt"] = (sorted_decrypt[-1] -
                                                     sorted_decrypt[0])

        logging.info("OpenSSL results: %s", str(results))

        return results
    def run(self, mpi_threads, threads, arch=None):
        """Run High-Performance Linpack.

        Args:
            mpi_threads (int): The number of MPI threads used by LINPACK. This
                number is usually the number of physical cores on the system.
            threads (int): The total number of logical threads on the system.
            arch (str, optional): The architecture type of the system.

        Returns:
            If success, a dict containing (unit, mathlib, score).

                unit (str): Score units.
                mathlib (str): The math library HPL was built against.
                score (float): The best (highest) GFLOPS result measured.

            Else, a dict containing (error).

                error (str): Error message.
        """
        if arch is None:
            arch = "x86_64"

        shell_env = os.environ.copy()
        openmpi_dir = "{}/openmpi/build/bin".format(self.src_dir)
        bin_dir = "{}/bin/{}".format(self.hpl_dir, arch)
        bin_loc = bin_dir + "/xhpl"
        results = {"unit": "GFLOPS", "mathlib": self.mathlib}
        tmp_results = []

        if not os.path.isfile(bin_loc):
            text = 'Could not find HPL binaries at "{}".'.format(bin_loc)
            prettify.error_message(text)
            logging.error(text)
            return {"error": text}

        if not os.path.isdir(openmpi_dir):
            text = 'Could not find OpenMPI directory at "{}".'.format(
                openmpi_dir)
            prettify.error_message(text)
            logging.error(text)
            return {"error": text}

        grid = self.__grid(mpi_threads)
        nb_size = self.__nb_size(threads)

        mpi_cmd = "{}/mpirun -n {} --allow-run-as-root --mca mpi_paffinity_alone 1".format(
            openmpi_dir, mpi_threads)

        # Processor affinity is unnecessary when every logical thread is
        # already occupied by an MPI rank.
        if threads == mpi_threads:
            mpi_cmd = "{}/mpirun -n {} --allow-run-as-root".format(
                openmpi_dir, mpi_threads)

        logging.info('Running LINPACK using "%s" arch.', arch)

        os.makedirs(self.results_dir, exist_ok=True)
        # Bug fix: shutil.copyfile() raises when the destination is a
        # directory; shutil.copy() accepts a directory and keeps the
        # source file name.
        shutil.copy(self.hpl_dir + "/Make." + arch, self.results_dir)
        shutil.copy(self.hpl_dir + "/bin/{}/HPL.dat".format(arch),
                    self.results_dir)

        cmd = mpi_cmd + " ./xhpl"

        self.commands.append("Run: " + cmd)

        optimize.prerun()
        time.sleep(10)

        output = execute.output(cmd,
                                working_dir=bin_dir,
                                environment=shell_env)

        file.write(self.results_dir + "/linpack_output.txt", output)

        # Result rows contain the NB size followed by the P and Q grid
        # dimensions used for this run.
        result = grep.text(
            output, r"\s+{}\s+{}\s+{}\s+".format(nb_size, grid.P, grid.Q))

        for line in result:
            # 7th word is the GFLOPS score.
            tmp = float(line.split()[6])
            tmp_results.append(tmp)

        if tmp_results:
            # Report the best score across all matched result rows.
            sorted_results = sorted(tmp_results)
            results["score"] = sorted_results[-1]

        logging.info("LINPACK results: %s", str(results))

        return results
# Example #26
    def run(self):
        """Run zlib compression (level 6) and decompression three times.

        Returns:
            If success, a dict containing (unit, run1, run2, run3, average,
            median).

                unit (str): Score units.
                run1 (list): A list of (compress, decompress).
                    compress (float): The compression score for the first run.
                    decompress (float): The decompression score for the first
                        run.
                run2 (list): A list of (compress, decompress).
                    compress (float): The compression score for the second run.
                    decompress (float): The decompression score for the second
                        run.
                run3 (list): A list of (compress, decompress).
                    compress (float): The compression score for the third run.
                    decompress (float): The decompression score for the third
                        run.
                average (list): A list of (compress, decompress).
                    compress (float): The compression average of run1, run2,
                        and run3.
                    decompress (float): The decompression average of run1,
                        run2, and run3.
                median (list): A list of (compress, decompress).
                    compress (float): The compression median of run1, run2, and
                        run3.
                    decompress (float): The decompression median of run1, run2,
                        and run3.

            Else, a dict containing (error).

                error (str): Error message.
        """
        bin32_loc = self.zlib_dir + "/minigzip"
        bin64_loc = self.zlib_dir + "/minigzip64"
        level = 6
        corpus_file = self.corpus_dir + "/corpus.txt"
        corpus_archive = corpus_file + ".zlib"
        results = {"unit": "s"}
        compress_times = []
        decompress_times = []

        # Bug fix: abort only when *both* binaries are missing (was `or`,
        # which made the 32-bit fallback below unreachable and contradicted
        # the "neither ... could be found" error text).
        if not os.path.isfile(bin32_loc) and not os.path.isfile(bin64_loc):
            text = ('Cannot run zlib because neither "{}" nor "{}" could be'
                    " found.".format(bin32_loc, bin64_loc))
            prettify.error_message(text)
            return {"error": text}

        if not os.path.isfile(corpus_file):
            self.create_corpus()

        logging.info("Running zlib.")

        # Prefer the 64-bit binary; fall back to 32-bit when it is absent.
        used_bin = bin64_loc

        if not os.path.isfile(bin64_loc):
            used_bin = bin32_loc

        os.makedirs(self.results_dir, exist_ok=True)

        compress_warmup = "{} -1 < {} > /dev/null".format(used_bin, corpus_file)
        compress_cmd = "{} -{} < {} > {}".format(used_bin, level, corpus_file,
                                                 corpus_archive)
        decompress_warmup = "{} -d < {} > /dev/null".format(
            used_bin, corpus_archive)
        decompress_cmd = "{} -d < {} > /dev/null".format(
            used_bin, corpus_archive)

        self.commands.append("Run - Warmup: " + compress_warmup)
        self.commands.append("Run: " + compress_cmd)
        self.commands.append("Run - Warmup: " + decompress_warmup)
        self.commands.append("Run: " + decompress_cmd)

        for count in range(1, 4):
            run_num = "run" + str(count)
            result_file = "{}/zlib_{}.txt".format(self.results_dir, run_num)

            optimize.prerun()
            time.sleep(10)

            # Warm up the cache/binary before the timed compression run.
            execute.output(compress_warmup, self.corpus_dir)

            compress_time = execute.timed(compress_cmd, self.corpus_dir)

            optimize.prerun()
            time.sleep(10)

            # Warm up before the timed decompression run.
            execute.output(decompress_warmup, self.corpus_dir)

            decompress_time = execute.timed(decompress_cmd, self.corpus_dir)

            file.write(
                result_file,
                "Compress Time (Level {}):  {}\n"
                "Decompress Time:          {}\n".format(level, compress_time,
                                                        decompress_time),
            )

            compress_times.append(compress_time)
            decompress_times.append(decompress_time)

            results[run_num] = {}
            results[run_num]["compress"] = compress_time
            results[run_num]["decompress"] = decompress_time

        os.remove(corpus_archive)

        results["average"] = {}
        results["average"]["compress"] = statistics.mean(compress_times)
        results["average"]["decompress"] = statistics.mean(decompress_times)
        results["median"] = {}
        results["median"]["compress"] = statistics.median(compress_times)
        results["median"]["decompress"] = statistics.median(decompress_times)
        results["variance"] = {}
        results["variance"]["compress"] = statistics.variance(compress_times)
        results["variance"]["decompress"] = statistics.variance(
            decompress_times)
        results["range"] = {}
        # Range = max - min of the three timed runs.
        results["range"]["compress"] = max(compress_times) - min(compress_times)
        results["range"]["decompress"] = (max(decompress_times) -
                                          min(decompress_times))

        logging.info("zlib results: %s", str(results))

        return results
# --- Example #27 (scrape-artifact separator; original marker: "예제 #27" / vote count 0) ---
    def build(self, cache, sockets, cflags=None, stream_array_size=None):
        """Compiles STREAM with GCC.

        Args:
            cache (int): The cache size in bytes (usually L3).
            sockets (int): The number of sockets on the system.
            cflags (str, optional): The CFLAGS for GCC.
            stream_array_size (int, optional): The array size for STREAM.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        if cflags is None:
            cflags = "-march=native -mtune=native"
        if "-O" not in cflags:
            cflags += " -O3 "

        cflags += " -fopenmp "

        stream_file = self.stream_dir + "/stream.c"
        stream_exe = self.stream_dir + "/stream"
        shell_env = os.environ.copy()
        mpi_root = self.src_dir + "/openmpi/build"
        mpi_path = mpi_root + "/bin"
        mpi_lib = mpi_root + "/lib"
        shell_env["PATH"] += ":" + mpi_path
        # Bug fix: LD_LIBRARY_PATH entries must be ":"-separated; the old
        # code appended mpi_lib directly, corrupting the last existing entry.
        if "LD_LIBRARY_PATH" in shell_env:
            shell_env["LD_LIBRARY_PATH"] += ":" + mpi_lib
        else:
            shell_env["LD_LIBRARY_PATH"] = mpi_lib

        mpicc = mpi_path + "/mpicc"

        # Already built; nothing to do.
        if os.path.isfile(stream_exe):
            return True

        if not os.path.isfile(stream_file):
            prettify.error_message('Cannot compile STREAM because "{}" could '
                                   "not be found.".format(stream_file))
            return False

        if stream_array_size is None:
            stream_array_size = self.__set_array_size(cache, sockets)

        # Arrays beyond 4 GB need the medium code model to link.
        if stream_array_size > 4000000000:
            cflags += " -mcmodel=medium "

        cflags += " -D_OPENMP "
        cflags += " -DSTREAM_ARRAY_SIZE={} ".format(stream_array_size)
        cflags += " -DNTIMES=1000 "

        build_cmd = "{} {} stream.c -o stream".format(mpicc, cflags)

        logging.info(
            'Compiling STREAM with %d array size, and "%s" CFLAGS.',
            stream_array_size,
            cflags,
        )

        self.commands.append("Build: " + build_cmd)

        output = execute.output(build_cmd,
                                working_dir=self.stream_dir,
                                environment=shell_env)

        logging.debug(output)

        if os.path.isfile(stream_exe):
            # Ensure the produced binary is executable, then report success.
            status = os.stat(stream_exe)
            os.chmod(stream_exe, status.st_mode | stat.S_IEXEC)
            return True
        return False
# --- Example #28 (scrape-artifact separator; original marker: "예제 #28" / vote count 0) ---
    def run(self, threads):
        """Run GCC compiled STREAM three times.

        Args:
            threads (int): The total number of threads on the system.

        Returns:
            If success, a dict containing (unit, run1, run2, run3, average,
            median).

                unit (str): Score units.
                run1 (float): Score for the first run.
                run2 (float): Score for the second run.
                run3 (float): Score for the third run.
                average (float): Average of run1, run2, and run3.
                median (float): Median of run1, run2, and run3.

            Else, a dict containing (error).

                error (str): Error message.
        """
        stream_bin = self.stream_dir + "/stream"
        shell_env = os.environ.copy()
        shell_env["OMP_NUM_THREADS"] = str(threads)
        mpi_root = self.src_dir + "/openmpi/build"
        mpi_path = mpi_root + "/bin"
        mpi_lib = mpi_root + "/lib"
        shell_env["PATH"] += ":" + mpi_path
        # Bug fix: LD_LIBRARY_PATH entries must be ":"-separated; the old
        # code appended mpi_lib directly, corrupting the last existing entry.
        if "LD_LIBRARY_PATH" in shell_env:
            shell_env["LD_LIBRARY_PATH"] += ":" + mpi_lib
        else:
            shell_env["LD_LIBRARY_PATH"] = mpi_lib
        results = {"unit": "MB/s"}

        # Pin OMP threads to cores for stable memory-bandwidth numbers.
        shell_env["OMP_PROC_BIND"] = "true"

        if not os.path.isfile(stream_bin):
            text = 'Cannot run STREAM because "{}" could not be found.'.format(
                stream_bin)
            prettify.error_message(text)
            return {"error": text}

        logging.info("Running STREAM with %d OMP threads.", threads)

        os.makedirs(self.results_dir, exist_ok=True)

        tmp_results = []

        cmd = "./stream"

        self.commands.append("Run: OMP_NUM_THREADS = " + str(threads))
        self.commands.append("Run: OMP_PROC_BIND = true")
        self.commands.append("Run: " + cmd)

        for count in range(1, 4):
            run_num = "run" + str(count)
            result_file = "{}/stream_{}.txt".format(self.results_dir, run_num)

            optimize.prerun()
            time.sleep(10)

            output = execute.output(cmd,
                                    working_dir=self.stream_dir,
                                    environment=shell_env)

            file.write(result_file, output)

            # The Triad score is the 2nd whitespace-separated field of the
            # "Triad" line in STREAM's output.
            result = grep.text(output, "Triad")
            result = float(result[0].split()[1])
            results[run_num] = result
            tmp_results.append(result)

        results["average"] = statistics.mean(tmp_results)
        results["median"] = statistics.median(tmp_results)
        results["variance"] = statistics.variance(tmp_results)
        results["range"] = max(tmp_results) - min(tmp_results)

        logging.info("STREAM results: %s", str(results))

        return results
    def build(self, glibc_ver, cores=None, cflags=None):
        """Compiles OpenSSL.

        Notes:
            * Requires glibc to be built in `/usr/local/glibc`

        Args:
            glibc_ver (str): The glibc version installed in the source
                directory.
            cores (int, optional): The number of cores on the system.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if compilation was successful otherwise False.
        """
        if cores is None:
            cores = 1
        if cflags is None:
            cflags = "-march=native -mtune=native"
        if "-O" not in cflags:
            cflags += " -O3 "

        bin_loc = self.openssl_dir + "/apps/openssl"
        shell_env = os.environ.copy()

        shell_env["CFLAGS"] = cflags

        # Already built; nothing to do.
        if os.path.isfile(bin_loc):
            return True

        if not os.path.isdir(self.openssl_dir):
            # Bug fix: report the directory that was actually checked
            # (previously formatted `bin_loc`, which was misleading).
            prettify.error_message(
                'Cannot compile OpenSSL because "{}" could not be found.'.
                format(self.openssl_dir))
            return False

        logging.info(
            "Compiling OpenSSL with glibc version %s, %d Make threads"
            ', and "%s" CFLAGS.',
            glibc_ver,
            cores,
            str(shell_env["CFLAGS"]),
        )

        os.makedirs(self.openssl_dir + "/build", exist_ok=True)

        # Link against the locally-built glibc via rpath/dynamic-linker.
        config_cmd = ("./config -Wl,--rpath=/usr/local/glibc/lib "
                      "-Wl,--dynamic-linker=/usr/local/glibc/lib/ld-{0}.so "
                      "-Wl,-rpath,{1} --prefix={1}/build".format(
                          glibc_ver, self.openssl_dir))
        make_cmd = "make -s -j {}".format(cores)
        install_cmd = "make -s -j {} install".format(cores)

        self.commands.append("Build: CFLAGS = " + cflags)
        self.commands.append("Config: " + config_cmd)
        self.commands.append("Compile: " + make_cmd)
        self.commands.append("Install: " + install_cmd)

        logging.debug("Config command:\n%s\n", config_cmd)

        execute.output(config_cmd, self.openssl_dir, environment=shell_env)

        # Consistency fix: run the same command we logged above instead of
        # rebuilding the make string inline.
        compile_output = execute.output(make_cmd,
                                        self.openssl_dir,
                                        environment=shell_env)

        logging.debug("Compilation warnings/errors:\n%s", compile_output)

        # NOTE(review): install intentionally(?) runs without the CFLAGS
        # environment — confirm this matches the other build steps.
        install_output = execute.output(install_cmd, self.openssl_dir)
        logging.debug("Installation warnings/errors:\n%s", install_output)

        return os.path.isfile(bin_loc)
    def build(self, linux_ver, cores=None, cflags=None):
        """Builds the image for Docker to compile the Linux kernel.

        Starts a private Docker daemon (using the bundled `dockerd`), builds
        the `compile_kernel` image if it is not already present, then stops
        the daemon again.

        Args:
            linux_ver (str): The Linux kernel version.
            cores (int, optional): The number of Make cores.
            cflags (str, optional): The CFLAGS for GCC.

        Returns:
            Boolean: True if build was successful otherwise False.
        """
        if cores is None:
            cores = 1
        if cflags is None:
            cflags = "-march=native -mtune=native"
        if "-O" not in cflags:
            cflags += " -O3 "

        built = False
        pid_file = "/tmp/docker.pid"
        build_name = "compile_kernel"
        dockerfile = self.docker_dir + "/Dockerfile"

        shell_env = os.environ.copy()
        # Prepend so the bundled docker binaries win over any system install.
        shell_env["PATH"] = self.docker_dir + ":" + shell_env["PATH"]
        shell_env["CFLAGS"] = cflags

        # kernel.org groups tarballs by major version (e.g. v5.x/).
        major_version = linux_ver.split(".")[0]
        url = ("http://www.kernel.org/pub/linux/kernel/v{}.x/"
               "linux-{}.tar.gz").format(major_version, linux_ver)
        build_cmd = (
            'docker build --build-arg cores={} --build-arg cflags="{}" '
            "--ulimit nofile=1048576:1048576 --build-arg url={} "
            "--build-arg version={} -t {} {}".format(cores, cflags, url,
                                                     linux_ver, build_name,
                                                     self.docker_dir))

        self.commands.append("Build: " + build_cmd)

        if not os.path.isfile(self.docker_dir + "/dockerd"):
            prettify.error_message("Cannot build. Docker directory not found.")
            return False

        os.makedirs(self.data_dir, exist_ok=True)

        # Start Docker daemon
        # The daemon runs in the background; its output is discarded and its
        # PID is written to pid_file so it can be killed later.
        logging.debug("Starting Docker daemon.")
        subprocess.Popen(
            "{}/dockerd --pidfile {} --data-root {} &".format(
                self.docker_dir, pid_file, self.data_dir),
            cwd=self.docker_dir,
            shell=True,
            env=shell_env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        # Fixed wait for the daemon to come up; there is no readiness probe.
        time.sleep(20)

        # Make sure Docker has enough IPs available to assign to containers
        if shutil.which("ifconfig"):
            execute.output(
                "ifconfig docker0 down && ifconfig docker0 172.17.0.1/16 up")

        # Fall back to the Dockerfile shipped with the sources.
        if not os.path.isfile(dockerfile):
            shutil.copyfile(self.src_dir + "/provided/Dockerfile", dockerfile)

        # Skip the (expensive) build when the image already exists.
        if not self.__image_built(build_name, env=shell_env):
            build_output = execute.output(build_cmd,
                                          working_dir=self.docker_dir,
                                          environment=shell_env)
            logging.debug(build_output)

        # Re-check: success means the image is now present.
        if self.__image_built(build_name, env=shell_env):
            logging.info("Docker image built.")
            built = True

        # Stop Docker daemon
        if os.path.exists(pid_file):
            logging.debug("Stopping Docker daemon.")
            pid = file.read(pid_file).strip()
            # NOTE(review): kill is issued twice back-to-back — presumably a
            # deliberate nudge if the first signal is ignored; confirm intent.
            execute.kill(pid)
            execute.kill(pid)
            time.sleep(5)

        return built