def test_prepare_slurm(self, mock_create, mock_export) -> None:
    """Check that `vara-container prepare-slurm` configures the VaRA,
    SLURM, and BenchBuild container settings consistently."""
    runner = CliRunner()
    bb_root = str(vara_cfg()["benchbuild_root"])
    node_dir = "/tmp/foo"
    export_dir = "/tmp/containers/export"
    # start from a clean state so the command must set the tool itself
    vara_cfg()["container"]["research_tool"] = None
    result = runner.invoke(driver_container.main, [
        "prepare-slurm", "-i", "DEBIAN_10", "-t", "vara", "--export-dir",
        export_dir, "--node-dir", node_dir
    ])
    self.assertEqual(0, result.exit_code, result.exception)

    # check vara config
    self.assertEqual("vara", str(vara_cfg()["container"]["research_tool"]))

    # check slurm config
    self.assertEqual(
        f"{bb_root}/slurm_container.sh.inc",
        str(bb_cfg()["slurm"]["template"])
    )
    self.assertTrue(Path(f"{bb_root}/slurm_container.sh.inc").is_file())

    # check bb container config
    self.assertEqual(
        f"{node_dir}/containers/lib", str(bb_cfg()["container"]["root"])
    )
    self.assertEqual(
        f"{node_dir}/containers/run", str(bb_cfg()["container"]["runroot"])
    )
    self.assertEqual(export_dir, str(bb_cfg()["container"]["export"]))
    self.assertEqual(export_dir, str(bb_cfg()["container"]["import"]))
    self.assertTrue(Path(export_dir).is_dir())
def test_create_base_image_from_source(self) -> None:
    """Test varats install from source."""
    vara_cfg()["container"]["research_tool"] = None
    vara_cfg()["container"]["from_source"] = True
    vara_cfg()["container"]["varats_source"] = "varats_src"
    bb_cfg()["container"]["from_source"] = False
    bb_cfg()["jobs"] = 42
    with TemporaryDirectory() as tmpdir:
        image_context = BaseImageCreationContext(ImageBase.DEBIAN_10, Path(tmpdir))
        _create_base_image_layers(image_context)
        layers = image_context.layers
        # layer 12 is expected to be the pip install of the mounted source tree
        varats_install_layer = self.check_layer_type(layers[12], RunLayer)
        self.assertEqual("pip3", varats_install_layer.command)
        self.assertTupleEqual(
            ("install", "--ignore-installed", "/varats/varats-core",
             "/varats/varats"), varats_install_layer.args)
        mounting_parameters = "type=bind,src=varats_src,target=/varats"
        # newer buildah versions support dropping the mount after the build
        if buildah_version() >= (1, 24, 0):
            mounting_parameters += ",rm"
        self.assertIn(("mount", mounting_parameters),
                      varats_install_layer.kwargs)
def compile(self) -> None:
    """Compile the project."""
    brotli_version_source = local.path(self.source_of_primary)
    brotli_git_path = get_local_project_git_path(self.NAME)
    brotli_version = ShortCommitHash(self.version_of_primary)
    with local.cwd(brotli_git_path):
        # revisions that use the legacy ./configure build flow
        configure_revisions = get_all_revisions_between(
            "f9ab24a7aaee93d5932ba212e5e3d32e4306f748",
            "5814438791fb2d4394b46e5682a96b68cd092803", ShortCommitHash)
        # oldest revisions that only ship a plain Makefile under tools/
        simple_make_revisions = get_all_revisions_between(
            "e1739826c04a9944672b99b98249dda021bdeb36",
            "378485b097fd7b80a5e404a3cb912f7b18f78cdb", ShortCommitHash)
    c_compiler = bb.compiler.cc(self)
    if brotli_version in simple_make_revisions:
        # plain make build inside tools/
        with local.cwd(brotli_version_source / "tools"):
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
    elif brotli_version in configure_revisions:
        with local.cwd(brotli_version_source):
            with local.env(CC=str(c_compiler)):
                bb.watch(local["./configure"])()
                bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
    else:
        # modern revisions: cmake-based out-of-tree build in out/
        mkdir(brotli_version_source / "out")
        with local.cwd(brotli_version_source / "out"):
            with local.env(CC=str(c_compiler)):
                bb.watch(local["../configure-cmake"])()
                bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
    with local.cwd(brotli_version_source):
        verify_binaries(self)
def test_bb_run_slurm_and_container(self, mock_sbatch, mock_build_images) -> None:
    """End-to-end check that ``vara-run --slurm --container`` produces a
    SLURM batch script for the requested experiment."""
    runner = CliRunner()
    vara_cfg()['paper_config']['current_config'] = "test_revision_lookup"
    # needed so we see the paper config
    load_paper_config()
    # needed so benchbuild sees the paper config
    save_config()

    bb_cfg()["slurm"]["template"] = str(
        Path(str(vara_cfg()["benchbuild_root"])) / "slurm_container.sh.inc")
    save_bb_config()

    # Easiest way to configure slurm + container is 'vara-container'
    # As a side-effect, this command is now even more tested :)
    prepare_result = runner.invoke(driver_container.main, ["prepare-slurm"])
    self.assertEqual(0, prepare_result.exit_code, prepare_result.exception)
    self.assertTrue(Path(str(bb_cfg()["slurm"]["template"])).exists())

    result = runner.invoke(driver_run.main,
                           ["--slurm", "--container", "-E", "JustCompile"])
    self.assertEqual(0, result.exit_code, result.exception)
    # the generated batch script lands in the benchbuild root
    self.assertTrue((Path(str(vara_cfg()["benchbuild_root"])) /
                     "JustCompile-slurm.sh").exists())
def __is_slurm_prepared() -> bool:
    """Check whether the slurm/container setup seems to be configured
    properly."""
    # a missing container root means prepare-slurm was never run
    if not bb_cfg()["container"]["root"].value:
        return False
    # the slurm template must have been generated as well
    return Path(bb_cfg()["slurm"]["template"].value).exists()
def _add_benchbuild_config(image_context: BaseImageCreationContext) -> None:
    """Bake the BenchBuild configuration into the container image.

    Copies host libraries into the image when ``LD_LIBRARY_PATH`` is set in
    the BenchBuild environment config and exports the BenchBuild settings as
    environment variables inside the image.

    Args:
        image_context: the build context of the image being created
    """
    # copy libraries to image if LD_LIBRARY_PATH is set
    # (idiom fix: membership test on the dict directly, not on .keys())
    if "LD_LIBRARY_PATH" in bb_cfg()["env"].value:
        image_context.layers.copy_(
            bb_cfg()["env"].value["LD_LIBRARY_PATH"],
            str(image_context.varats_root / "libs")
        )
        image_context.append_to_env(
            "LD_LIBRARY_PATH", [str(image_context.varats_root / "libs")]
        )
    # set BB config via env vars
    image_context.layers.env(
        BB_VARATS_OUTFILE=str(image_context.varats_root / "results"),
        BB_VARATS_RESULT=str(image_context.varats_root / "BC_files"),
        BB_JOBS=str(bb_cfg()["jobs"]),
        BB_ENV=to_yaml(image_context.env)
    )
def test_create_dev_image(self) -> None:
    """Test VaRA install inside container."""
    vara_cfg()["vara"]["llvm_source_dir"] = "tools_src/vara-llvm-project"
    vara_cfg()["vara"]["llvm_install_dir"] = "tools/VaRA"
    vara_cfg()["container"]["from_source"] = False
    bb_cfg()["container"]["from_source"] = False
    with TemporaryDirectory() as tmpdir:
        research_tool = get_research_tool("vara")
        image_context = BaseImageCreationContext(ImageBase.DEBIAN_10, Path(tmpdir))
        _create_dev_image_layers(image_context, research_tool)
        layers = image_context.layers

        # check that varats will be installed properly
        varats_install_layer = self.check_layer_type(layers[10], RunLayer)
        self.assertEqual("pip3", varats_install_layer.command)
        self.assertTupleEqual(
            ("install", '--ignore-installed', "varats-core", "varats"),
            varats_install_layer.args)
        varats_copy_config_layer = self.check_layer_type(layers[12], CopyLayer)
        self.assertEqual("/varats_root/.varats.yaml",
                         varats_copy_config_layer.destination)

        # check that research tool dependencies will be installed
        vara_deps_layer = self.check_layer_type(layers[11], RunLayer)
        self.assertEqual("apt", vara_deps_layer.command)

        # check that correct entry point will be set
        entrypoint_layer = self.check_layer_type(layers[16], EntryPoint)
        self.assertEqual(("vara-buildsetup", ), entrypoint_layer.command)
def test_vara_install(self, mock_install_exists) -> None:
    """Test VaRA install inside container."""
    mock_install_exists.return_value = True
    vara_cfg()["container"]["research_tool"] = "vara"
    vara_cfg()["vara"]["llvm_source_dir"] = "tools_src/vara-llvm-project"
    vara_cfg()["vara"]["llvm_install_dir"] = "tools/VaRA"
    vara_cfg()["container"]["from_source"] = False
    bb_cfg()["container"]["from_source"] = False
    with TemporaryDirectory() as tmpdir:
        image_context = BaseImageCreationContext(ImageBase.DEBIAN_10, Path(tmpdir))
        _create_base_image_layers(image_context)
        layers = image_context.layers

        # VaRA's dependencies are installed via apt
        vara_deps_layer = self.check_layer_type(layers[11], RunLayer)
        self.assertEqual("apt", vara_deps_layer.command)

        # the prebuilt VaRA install dir is copied into the image
        vara_copy_layer = self.check_layer_type(layers[12], CopyLayer)
        self.assertEqual("/varats_root/tools/VaRA_DEBIAN_10",
                         vara_copy_layer.destination)
        self.assertTupleEqual(("tools/VaRA_DEBIAN_10", ),
                              vara_copy_layer.sources)

        # VaRA's bin dir is appended to PATH via the BB_ENV variable
        bb_config_layer = self.check_layer_type(layers[15], UpdateEnv)
        self.assertTupleEqual(
            ("BB_ENV", "{PATH: [/varats_root/tools/VaRA_DEBIAN_10/bin]}"),
            bb_config_layer.env[3])
def get_cached_bc_file_path(
    project: Project,
    binary: ProjectBinaryWrapper,
    required_bc_file_extensions: tp.Optional[tp.List[BCFileExtensions]] = None,
) -> Path:
    """
    Look up the path to a BC file from the BC cache.

    Args:
        project: the project
        binary: which corresponds to the BC file
        required_bc_file_extensions: list of required file extensions

    Returns:
        path to the cached BC file
    """
    cache_folder = local.path(
        Extract.BC_CACHE_FOLDER_TEMPLATE.format(
            cache_dir=str(bb_cfg()["varats"]["result"]),
            project_name=str(project.name)
        )
    )
    file_name = Extract.get_bc_file_name(
        project_name=project.name,
        binary_name=binary.name,
        project_version=project.version_of_primary,
        bc_file_extensions=required_bc_file_extensions
    )
    candidate = cache_folder / file_name
    if candidate.exists():
        return Path(candidate)
    raise LookupError(
        "No corresponding BC file found in cache. Project was probably not"
        " compiled with the correct compile/extract action."
    )
def project_bc_files_in_cache(
    project: Project,
    required_bc_file_extensions: tp.Optional[tp.List[BCFileExtensions]]
) -> bool:
    """
    Checks if all bc files, corresponding to the projects binaries, are in the
    cache.

    Args:
        project: the project
        required_bc_file_extensions: list of required file extensions

    Returns:
        True, if all BC files are present, False otherwise.
    """
    # idiom fix: all() with a generator short-circuits on the first missing
    # file instead of accumulating booleans with `&=`
    return all(
        path.exists(
            local.path(
                Extract.BC_CACHE_FOLDER_TEMPLATE.format(
                    cache_dir=str(bb_cfg()["varats"]["result"]),
                    project_name=str(project.name)
                ) + Extract.get_bc_file_name(
                    project_name=str(project.name),
                    binary_name=binary.name,
                    project_version=project.version_of_primary,
                    bc_file_extensions=required_bc_file_extensions
                )
            )
        ) for binary in project.binaries
    )
def compile(self) -> None:
    """Compile the project."""
    xz_git_path = get_local_project_git_path(self.NAME)
    xz_version_source = local.path(self.source_of_primary)
    xz_version = self.version_of_primary

    # dynamic linking is off by default until
    # commit f9907503f882a745dce9d84c2968f6c175ba966a
    # (fda4724 is its parent)
    with local.cwd(xz_git_path):
        revisions_wo_dynamic_linking = get_all_revisions_between(
            "5d018dc03549c1ee4958364712fb0c94e1bf2741",
            "fda4724d8114fccfa31c1839c15479f350c2fb4c", ShortCommitHash)

    self.cflags += ["-fPIC"]

    clang = bb.compiler.cc(self)
    with local.cwd(xz_version_source):
        with local.env(CC=str(clang)):
            # regenerate the autotools build files before configuring
            bb.watch(autoreconf)("--install")
            configure = bb.watch(local["./configure"])
            if xz_version in revisions_wo_dynamic_linking:
                # old revisions need dynamic linking enabled explicitly
                configure("--enable-dynamic=yes")
            else:
                configure()
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def test_without_versions(self):
    """Test if we get the correct revision if no VaRA modifications are
    enabled."""
    # disable full-version sampling so only a single variant is drawn
    bb_cfg()["versions"]["full"] = False
    sampled_variants = self.vers_expr.sample(BBTestProject)
    first_version = sampled_variants[0]["test_source"].version
    self.assertEqual(first_version, "rev1000000")
    self.assertEqual(len(sampled_variants), 1)
def __compile_make(self) -> None:
    """Build libssh via the autotools flow used by older revisions."""
    libssh_source = local.path(self.source_of(self.primary_source))
    libssh_version = ShortCommitHash(self.version_of_primary)

    # revisions that require running autoreconf before ./configure
    autoconf_revisions = get_all_revisions_between(
        "5e02c25291d594e01a910fce097a3fc5084fd68f",
        "21e639cc3fd54eb3d59568744c9627beb26e07ed", ShortCommitHash,
        libssh_source
    )
    # revisions that require running ./autogen.sh first
    autogen_revisions = get_all_revisions_between(
        "ca32b0aa146b31d7772f27d16098845e615432aa",
        "ee54acb417c5589a8dc9dab0676f34b3d40a182b", ShortCommitHash,
        libssh_source
    )
    compiler = bb.compiler.cc(self)
    with local.cwd(libssh_source):
        with local.env(CC=str(compiler)):
            if libssh_version in autogen_revisions:
                bb.watch("./autogen.sh")()
            if libssh_version in autoconf_revisions:
                bb.watch("autoreconf")()
            configure = bb.watch(local["./configure"])
            configure()
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def get_local_project_git_path(
    project_name: str, git_name: tp.Optional[str] = None
) -> Path:
    """
    Get the path to the local download location of a git repository for a
    given benchbuild project.

    Args:
        project_name: name of the given benchbuild project
        git_name: name of the git repository, i.e., the name of the
                  repository folder. If no git_name is provided, the name of
                  the primary source is used.

    Returns:
        Path to the local download location of the git repository.
    """
    if git_name:
        source = get_extended_commit_lookup_source(project_name, git_name)
    else:
        source = get_primary_project_source(project_name)

    if not is_git_source(source):
        raise AssertionError(f"Project {project_name} does not use git.")

    base = Path(str(bb_cfg()["tmp_dir"]))
    git_path: Path = base / source.local
    if not git_path.exists():
        # NOTE(review): benchbuild appears to flatten nested local paths by
        # replacing path separators with '-' — confirm against bb sources
        git_path = base / source.local.replace(os.sep, "-")
        if not git_path.exists():
            # last resort: fetch the repository and use the fetched location
            git_path = Path(source.fetch())

    return git_path
def compile(self) -> None:
    """Compile the project."""
    gzip_version_source = local.path(self.source_of_primary)

    # Build binaries in separate dir because executing the binary with path
    # 'gzip' will execute '/usr/bin/gzip' independent of the current working
    # directory.
    mkdir("-p", gzip_version_source / "build")

    # silence warnings that newer compilers treat as errors
    self.cflags += [
        "-Wno-error=string-plus-int", "-Wno-error=shift-negative-value",
        "-Wno-string-plus-int", "-Wno-shift-negative-value"
    ]

    with local.cwd(gzip_version_source):
        bb.watch(local["./bootstrap"])()

    c_compiler = bb.compiler.cc(self)
    with local.cwd(gzip_version_source / "build"), local.env(CC=str(c_compiler)):
        bb.watch(local["../configure"])()
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))

    with local.cwd(gzip_version_source):
        verify_binaries(self)
def compile(self) -> None:
    """Compile the project."""
    x264_version_source = local.path(self.source_of_primary)
    x264_version = ShortCommitHash(self.version_of_primary)

    # revisions in this range need -fPIC to build
    fpic_revisions = get_all_revisions_between(
        "5dc0aae2f900064d1f58579929a2285ab289a436",
        "290de9638e5364c37316010ac648a6c959f6dd26", ShortCommitHash,
        x264_version_source)
    # revisions in this range need extra static ldflags
    ldflags_revisions = get_all_revisions_between(
        "6490f4398d9e28e65d7517849e729e14eede8c5b",
        "275ef5332dffec445a0c5a78dbc00c3e0766011d", ShortCommitHash,
        x264_version_source)

    if x264_version in fpic_revisions:
        self.cflags += ["-fPIC"]

    clang = bb.compiler.cc(self)
    with local.cwd(x264_version_source):
        with local.env(CC=str(clang)):
            configure_flags = ["--disable-asm"]
            if x264_version in ldflags_revisions:
                configure_flags.append("--extra-ldflags=\"-static\"")
            bb.watch(local["./configure"])(configure_flags)
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def sample(cls, prj_cls: tp.Type[Project]) -> tp.List[source.VariantContext]:
    """
    Adapt version sampling process if needed, otherwise fallback to default
    implementation.

    Args:
        prj_cls: project class

    Returns:
        list of sampled versions
    """
    variants = list(source.product(*prj_cls.SOURCE))

    if bool(vara_cfg()["experiment"]["random_order"]):
        random.shuffle(variants)

    fs_blacklist = vara_cfg()["experiment"]["file_status_blacklist"].value
    fs_whitelist = vara_cfg()["experiment"]["file_status_whitelist"].value

    if fs_blacklist or fs_whitelist:
        # start from all statuses (or none, if a whitelist is given),
        # then subtract the blacklist and add back the whitelist
        fs_good = set(FileStatusExtension) if not fs_whitelist else set()
        fs_good -= {
            FileStatusExtension.get_file_status_from_str(x)
            for x in fs_blacklist
        }
        fs_good |= {
            FileStatusExtension.get_file_status_from_str(x)
            for x in fs_whitelist
        }

        # collect, per report type, the revisions whose status is not "good"
        report_specific_bad_revs = []
        for report_type in cls.report_spec():
            report_specific_bad_revs.append({
                revision.hash for revision, file_status in
                get_tagged_experiment_specific_revisions(
                    prj_cls, report_type, experiment_type=cls
                ) if file_status not in fs_good
            })
        # a revision is bad only if it is bad for every report type
        bad_revisions = report_specific_bad_revs[0].intersection(
            *report_specific_bad_revs[1:]
        )

        variants = list(
            filter(lambda var: str(var[0]) not in bad_revisions, variants)
        )

    if not variants:
        print("Could not find any unprocessed variants.")
        return []

    variants = cls._sample_num_versions(variants)

    if bool(bb_cfg()["versions"]["full"]):
        return [source.context(*var) for var in variants]
    return [source.context(*variants[0])]
def __compile_make(self) -> None:
    """Build gravity with a plain ``make`` invocation."""
    source_dir = local.path(self.source_of_primary)
    cc_compiler = bb.compiler.cc(self)
    with local.cwd(source_dir):
        with local.env(CC=str(cc_compiler)):
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def _unset_varats_source_mount(
    image_context: BaseImageCreationContext) -> None:
    """Remove the varats source mount from the BenchBuild mount config and
    persist the change."""
    target = str(image_context.varats_source_mount_target)
    mounts = bb_cfg()["container"]["mounts"].value
    # slice-assign so the config object keeps the same list instance
    mounts[:] = [entry for entry in mounts if entry[1] != target]
    save_bb_config()
def copy_to_env(self, path: Path) -> None:
    """Clone the cached repository into ``path``'s benchbuild tmp dir.

    Also points both the varats and the benchbuild settings at the new tmp
    dir so later tooling picks it up. Guarded by a lock since multiple
    workers may share this object.
    """
    with self.__lock:
        bb_tmp = str(path / "benchbuild/tmp")
        settings.bb_cfg()["tmp_dir"] = bb_tmp
        base.CFG["tmp_dir"] = bb_tmp
        # --reference + --dissociate: borrow objects from the local cache
        # for speed, but keep the resulting clone self-contained
        git(
            "clone", "--dissociate", "--recurse-submodules", "--reference",
            self.__local, self.__remote, f"{bb_tmp}/{self.__repo_name}"
        )
def _build_in_container(tool: ResearchTool[SpecificCodeBase],
                        image_base: ImageBase,
                        build_type: BuildType,
                        install_prefix: tp.Optional[Path] = None) -> None:
    """Build a research tool inside a container.

    Mounts the tool's source and install directories into the container and
    runs ``vara-buildsetup build`` inside it.

    Args:
        tool: the research tool to build
        image_base: the base image to build in
        build_type: the build configuration to use
        install_prefix: host directory to install into; defaults to the
            tool's install location suffixed with the image name
    """
    vara_cfg()["container"]["research_tool"] = tool.name
    image_name = f"{image_base.image_name}_{build_type.name}"

    if not install_prefix:
        install_prefix = Path(
            str(tool.install_location()) + "_" + image_base.name)

    if not install_prefix.exists():
        install_prefix.mkdir(parents=True)

    source_mount = 'tools_src/'
    install_mount = 'tools/'

    click.echo("Preparing container image.")
    create_dev_image(image_base, tool)

    with TemporaryDirectory() as tmpdir:
        image_context = BaseImageCreationContext(image_base, Path(tmpdir))
        # resolve mount targets relative to the varats root inside the image
        source_mount = str(image_context.varats_root / source_mount)
        install_mount = str(image_context.varats_root / install_mount)
        bb_cfg()["container"]["mounts"].value[:] += [
            # mount tool src dir
            [str(tool.source_location()), source_mount],
            # mount install dir
            [str(install_prefix), install_mount]
        ]

        click.echo(f"Building {tool.name} ({build_type.name}) "
                   f"in a container ({image_base.name}).")

        run_container(image_name, f"build_{tool.name}", None, [
            "build",
            tool.name.lower(),
            "--no-update-prompt",
            f"--build-type={build_type.name}",
            f"--source-location={source_mount}",
            f"--install-prefix={install_mount}",
            f"--build-folder-suffix={image_base.name}",
        ])
def compile(self) -> None:
    """Compile the project."""
    source_dir = local.path(self.source_of_primary)
    c_compiler = bb.compiler.cc(self)
    with local.cwd(source_dir):
        with local.env(CC=str(c_compiler)):
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def test_result_folder_creation(self):
    """Checks if we get the correct result folder back."""
    # idiom fix: os.getcwd() already returns a str, no need to wrap it
    test_tmp_folder = os.getcwd()
    bb_cfg()["varats"]["outfile"] = test_tmp_folder + "/results"
    result_folder = EU.get_varats_result_folder(BBTestProject())

    # the result folder is <outfile>/<project name> and must be created
    self.assertEqual(
        test_tmp_folder + "/results/" + BBTestProject.NAME,
        str(result_folder)
    )
    self.assertTrue(result_folder.exists())
def export_base_image(base: ImageBase) -> None:
    """Export the base image to the filesystem."""
    publish = bootstrap.bus()
    export_dir = local.path(bb_cfg()["container"]["export"].value)
    archive_name = fs_compliant_name(base.image_name) + ".tar"
    export_path = Path(export_dir / archive_name)
    # drop a stale archive from a previous export before writing a new one
    if export_path.exists() and export_path.is_file():
        export_path.unlink()
    publish(ExportImage(base.image_name, str(export_path)))
def compile(self) -> None:
    """Compile the project."""
    source_dir = local.path(self.source_of_primary)
    cc_compiler = bb.compiler.cc(self)
    with local.cwd(source_dir):
        with local.env(CC=str(cc_compiler)):
            # generate Makefiles via cmake, then build
            bb.watch(cmake)("-G", "Unix Makefiles", ".")
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def compile(self) -> None:
    """Compile the project (bootstrap + configure + make)."""
    source_dir = local.path(self.source_of_primary)
    cc_compiler = bb.compiler.cc(self)
    with local.cwd(source_dir):
        with local.env(CC=str(cc_compiler)):
            bb.watch(local["./bootstrap"])()
            bb.watch(local["./configure"])("--disable-gcc-warnings")
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def compile(self) -> None:
    """Compile the project."""
    source_dir = local.path(self.source_of_primary)
    cc_compiler = bb.compiler.cc(self)
    with local.cwd(source_dir):
        with local.env(CC=str(cc_compiler)):
            # openssl uses its own ./config script instead of ./configure
            bb.watch(local['./config'])()
            bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def test_if_projects_were_added(self):
    """Test if all projects were added to the benchbuild config."""
    excluded_projects = ["varats.experiments.c_projects.glibc"]
    loaded_plugins = bb_cfg()["plugins"]["projects"].value
    # every project package must be fully registered as a bb plugin
    for package in (
        "varats.projects.c_projects",
        "varats.projects.cpp_projects",
        "varats.projects.perf_tests",
    ):
        self.check_all_files_in_config_list(
            package, loaded_plugins, excluded_projects
        )
def _create_dev_image_layers(
    image_context: BaseImageCreationContext,
    research_tool: ResearchTool[tp.Any]
) -> None:
    """Assemble the container layers for a development image.

    NOTE: the order of these calls determines the layer indices that other
    code (and the tests) rely on — do not reorder.
    """
    image_context.layers.run('pip3', 'install', '--upgrade', 'pip')
    _add_varats_layers(image_context)
    # optionally install benchbuild from source instead of from pip
    if bb_cfg()['container']['from_source']:
        add_benchbuild_layers(image_context.layers)
    research_tool.container_install_dependencies(image_context)
    _add_vara_config(image_context)
    _add_benchbuild_config(image_context)
    image_context.layers.workingdir(str(image_context.varats_root))
    image_context.layers.entrypoint("vara-buildsetup")
def compile(self) -> None:
    """Compile the project."""
    source_dir = local.path(self.source_of_primary)
    cc_compiler = bb.compiler.cc(self)
    with local.cwd(source_dir):
        with local.env(CC=str(cc_compiler)):
            # glib uses a meson + ninja build system
            bb.watch(meson)("build")
            bb.watch(ninja)("-j", get_number_of_jobs(bb_cfg()), "-C", "build")
        verify_binaries(self)