def setup_bash_in_container(builddir, container, outfile, mounts, shell):
    """Drop the user into an interactive shell inside a uchroot container.

    If the shell exits successfully, the modified container image is
    repacked from ``container-in`` and moved to ``outfile``; a non-zero
    exit aborts without storing anything.

    Args:
        builddir: Working directory holding container-in/container-out.
        container: Path to the container image being edited.
        outfile: Destination path for the repacked image.
        mounts: Extra mounts passed through to the container runtime.
        shell: Shell binary to run inside the container.
    """
    with local.cwd(builddir):
        # Switch to bash inside uchroot.
        # FIX: the original message concatenated to "non-zeroexit code";
        # a space was missing between the string fragments.
        print("Entering bash inside User-Chroot. Prepare your image and "
              "type 'exit' when you are done. If bash exits with a non-zero "
              "exit code, no new container will be stored.")
        store_new_container = True
        try:
            run_in_container(shell, container, mounts)
        except ProcessExecutionError:
            # Non-zero shell exit: user aborted; keep the old image.
            store_new_container = False

        if store_new_container:  # pylint: disable=W0104
            print("Packing new container image.")
            container_filename = os.path.split(container)[-1]
            container_out = os.path.join("container-out", container_filename)
            container_out = os.path.abspath(container_out)
            # Pack the results to: container-out
            with local.cwd("container-in"):
                tar("cjf", container_out, ".")
            update_hash(container_filename, os.path.dirname(container_out))
            outdir = os.path.dirname(outfile)
            if not os.path.exists(outdir):
                mkdir("-p", outdir)
            mv(container_out, outfile)
def configure(self): from benchbuild.utils.run import run # First we have to prepare boost for lady povray... boost_dir = path.join(self.builddir, self.boost_src_dir) boost_prefix = path.join(self.builddir, "boost-install") with local.cwd(boost_dir): from plumbum.cmd import mkdir mkdir(boost_prefix) bootstrap = local["./bootstrap.sh"] run(bootstrap["--with-toolset=clang", "--prefix=\"{0}\"".format( boost_prefix)]) b2 = local["./b2"] run(b2["--ignore-site-config", "variant=release", "link=static", "threading=multi", "optimization=speed", "install"]) povray_dir = path.join(self.builddir, self.src_dir) with local.cwd(path.join(povray_dir, "unix")): from plumbum.cmd import sh sh("prebuild.sh") with local.cwd(povray_dir): from benchbuild.utils.compiler import lt_clang, lt_clang_cxx with local.cwd(self.builddir): clang = lt_clang(self.cflags, self.ldflags, self.compiler_extension) clang_cxx = lt_clang_cxx(self.cflags, self.ldflags, self.compiler_extension) configure = local["./configure"] with local.env(COMPILED_BY="BB <*****@*****.**>", CC=str(clang), CXX=str(clang_cxx)): run(configure["--with-boost=" + boost_prefix])
def setup_prepare(self):
    """Unpacks loads local docker images and generates the topology.

    Loads container tarballs, runs each ``tag#script`` container loader
    and re-tags the loaded image, then configures coredump storage and
    generates the topology.
    """
    # Delete old artifacts, if any.
    cmd.rm("-rf", self.test_state.artifacts)
    cmd.mkdir(self.test_state.artifacts)
    print("artifacts dir: %s" % self.test_state.artifacts)
    for tar in self.test_state.containers_tars:
        print(cmd.docker("image", "load", "-i", tar))
    for loader in self.test_state.container_loaders:
        parts = loader.split("#")
        if len(parts) != 2:
            logger.error("Invalid container loader argument: %s, ignored"
                         % loader)
            continue
        tag, script = parts[0], parts[1]
        o = subprocess.check_output([script]).decode("utf-8")
        # FIX: str.index() raises ValueError when "as " is absent, so the
        # `idx < 0` guard below was dead code; str.find() returns -1.
        idx = o.find("as ")
        if idx < 0:
            logger.error("extracting tag from loader script %s" % loader)
            continue
        bazel_tag = o[idx + len("as "):].strip()
        logger.info("docker tag %s %s" % (bazel_tag, tag))
        subprocess.run(["docker", "tag", bazel_tag, tag], check=True)
    # Define where coredumps will be stored.
    print(
        cmd.docker("run", "--rm", "--privileged", "alpine", "sysctl", "-w",
                   "kernel.core_pattern=/share/coredump"))
    self._setup_generate()
def mount_via_cron(device, volume_id):
    """Arrange for *device* to be mounted by the host's cron, then wait.

    Writes a one-shot mount script into the host's /volumes/automount
    run-parts directory (reached via the /host_root bind mount); the
    script mounts the device, touches a marker file and deletes itself.
    Blocks until the marker appears (up to ~2 minutes).

    Raises:
        NotMountedException: via retry exhaustion if the mount never appears.
    """
    logger.info("Ensuring cron mount")
    # Paths as seen by the host vs. as seen from inside this container.
    mount_script = "/volumes/automount/%s" % volume_id
    mount_script_inner = "/host_root%s" % mount_script
    mounted_mark = "/volumes/automount/.mounted-%s" % volume_id
    mounted_mark_inner = "/host_root%s" % mounted_mark
    if not local.path('/host_root/volumes/automount').exists():
        # First time: create the run-parts dir and register it in crontab.
        mkdir('-p', "/host_root/volumes/automount")
        sh('-c', "echo '* * * * * root cd / && run-parts --report /volumes/automount' >> /host_root/etc/crontab")
    mkdir('-p', "/host_root/volumes/%s" % volume_id)
    # The script removes itself after a successful mount (one-shot).
    local.path(mount_script_inner).write(
        """#!/usr/bin/env sh
set -e
mount {device} /volumes/{volume_id}
touch {mounted_mark}
rm {mount_script}
""".format(**locals())
    )
    chmod('+x', "/host_root%s" % mount_script)

    @retry(wait_fixed=2000, stop_max_attempt_number=60)
    def wait_for_mount():
        # Poll for the marker the mount script touches; consume it once seen.
        logger.info("Waiting for mount")
        if local.path(mounted_mark_inner).exists():
            local.path(mounted_mark_inner).delete()
            return True
        else:
            raise NotMountedException()

    wait_for_mount()
def run_tests(self, experiment):
    """Render every bundled povray scene through the wrapped binary.

    For each .pov scene, the scene's suggested command-line options are
    scraped from its leading comment block (first 50 lines) and passed
    to the wrapped povray.

    Args:
        experiment: Experiment used to wrap the povray binary.
    """
    from plumbum.cmd import mkdir
    from benchbuild.project import wrap
    from benchbuild.utils.run import run

    povray_dir = path.join(self.builddir, self.src_dir)
    povray_binary = path.join(povray_dir, "unix", self.name)
    tmpdir = path.join(self.builddir, "tmp")
    povini = path.join(self.builddir, "cfg", ".povray", "3.6", "povray.ini")
    scene_dir = path.join(self.builddir, "share", "povray-3.6", "scenes")
    # retcode=None: don't fail if tmp already exists.
    mkdir(tmpdir, retcode=None)

    povray = wrap(povray_binary, experiment)
    pov_files = find(scene_dir, "-name", "*.pov").splitlines()
    for pov_f in pov_files:
        from plumbum.cmd import head, grep, sed
        with local.env(POVRAY=povray_binary,
                       INSTALL_DIR=self.builddir,
                       OUTPUT_DIR=tmpdir,
                       POVINI=povini):
            # Extract the first "//  +opt ..." hint line from the scene
            # header and strip the comment prefix to get extra options.
            options = ((((head["-n", "50", "\"" + pov_f + "\""] |
                          grep["-E", "'^//[ ]+[-+]{1}[^ -]'"]) |
                         head["-n", "1"]) |
                        sed["s?^//[ ]*??"]) & FG)
            # retcode=None: some scenes are expected to fail; keep going.
            run(povray["+L" + scene_dir, "+L" + tmpdir, "-i" + pov_f,
                       "-o" + tmpdir, options, "-p"],
                retcode=None)
def setup(self):
    """Prepare artifacts, network namespaces and the router container.

    Loads the pause and router images, starts a pause container to own
    the network namespace, wires up veths against it, then starts the
    router sharing that namespace.
    """
    shutil.rmtree(self.test_state.artifacts)
    cmd.mkdir(self.test_state.artifacts)
    shutil.copytree(self.conf_dir, self.test_state.artifacts + "/conf")
    sudo("mkdir -p /var/run/netns")
    # `docker image load -q` prints "Loaded image: <tag>"; take the tag.
    pause_image = exec_docker("image load -q -i %s" %
                              self.pause_tar).rsplit(' ', 1)[1]
    exec_docker("image load -q -i %s" % self.image_tar)
    exec_docker("run -d --network=none --name pause %s" % pause_image)
    # Expose the pause container's netns under /var/run/netns so that
    # `ip netns` tooling can manipulate it while we create the veths.
    ns = exec_docker(
        "inspect pause -f '{{.NetworkSettings.SandboxKey}}'").replace(
            "'", "")
    sudo("ln -sfT %s /var/run/netns/pause" % ns)
    self.create_veths("pause")
    sudo("rm /var/run/netns/pause")
    # BFD is disabled by default; enable it only when the test wants it.
    envs = ["-e SCION_EXPERIMENTAL_BFD_DISABLE=true"]
    if self.bfd:
        envs = []
    exec_docker("run -v %s/conf:/share/conf -d %s --network container:%s \
                --name router %s" %
                (self.test_state.artifacts, " ".join(envs), "pause",
                 "bazel/acceptance/router_multi:router"))
    # Give the router a moment to come up before tests proceed.
    time.sleep(1)
def makeAtlases(target, trainingTable, outdir, mabs=False):
    """Warp every training image (and its labelmaps) into target space.

    For each row of *trainingTable* a warp from the training image to
    *target* is computed and applied to the image and to each labelmap
    column. With ``mabs=True`` the per-label atlases are additionally
    averaged and thresholded at 0.5 into a fused labelmap per label.

    Args:
        target: Reference image all atlases are registered to.
        trainingTable: DataFrame-like; first column is 'image', remaining
            columns are labelmap paths (assumption inferred from the
            iloc[1:] slicing — TODO confirm against callers).
        outdir: Output directory for warps/atlases.
        mabs: Whether to run multi-atlas label fusion.
    """
    outdir = local.path(outdir)
    from plumbum.cmd import mkdir
    mkdir('-p', outdir)
    logging.info(
        'Create atlases: compute transforms from images to target and apply')
    for idx, r in trainingTable.iterrows():
        warp = outdir / 'warp{idx}.nii.gz'.format(**locals())
        atlas = outdir / 'atlas{idx}.nii.gz'.format(**locals())
        computeWarp(r['image'], target, warp)
        applyWarp(r['image'], warp, target, atlas)
        # Labels must keep discrete values -> nearest-neighbor resampling.
        for labelname, label in r.iloc[1:].iteritems():
            atlaslabel = outdir / '{labelname}{idx}.nii.gz'.format(**locals())
            applyWarp(label, warp, target, atlaslabel,
                      interpolation='NearestNeighbor')
    if mabs:
        from plumbum.cmd import unu, ConvertBetweenFileFormats, AverageImages
        for labelname in list(trainingTable)[1:]:
            out = outdir / labelname + '.nrrd'
            # Glob all per-index atlases for this label.
            labelmaps = outdir // labelname + '*'
            with TemporaryDirectory() as tmpdir:
                nii = tmpdir / 'mabs.nii.gz'
                AverageImages('3', nii, '0', *labelmaps)
                ConvertBetweenFileFormats(nii, out)
                # Majority vote: binarize the mean labelmap at 0.5.
                unu['2op', 'gt', out, '0.5'] | \
                    unu['save', '-e', 'gzip', '-f', 'nrrd', '-o', out] & FG
def configure(self):
    """Recreate a pristine ``run`` sandbox directory inside the build dir."""
    from plumbum.cmd import mkdir, rm
    run_sandbox = path.join(self.builddir, "run")
    # Start from a clean slate: wipe any sandbox left by a previous run.
    if path.exists(run_sandbox):
        rm("-rf", run_sandbox)
    mkdir(run_sandbox)
def collect_logs(self, out_dir: str = "logs/docker"):
    """Collects the logs from the services into the given directory.

    For every compose service: dump `docker logs`, copy the coredump
    (if any) and tshark captures (if any) out of the container.
    Missing artifacts are tolerated silently.
    """
    out_p = plumbum.local.path(out_dir)
    cmd.mkdir("-p", out_p)
    for svc in self("config", "--services").splitlines():
        # Collect logs.
        dst_f = out_p / "%s.log" % svc
        with open(dst_f, "w") as log_file:
            # retcode=None: a dead/unknown service must not abort collection.
            cmd.docker.run(args=("logs", svc),
                           stdout=log_file,
                           stderr=subprocess.STDOUT,
                           retcode=None)
        # Collect coredupms.
        coredump_f = out_p / "%s.coredump" % svc
        try:
            cmd.docker.run(args=("cp", svc + ":/share/coredump", coredump_f))
        except Exception:
            # If the coredump does not exist, do nothing.
            pass
        # Collect tshark traces.
        try:
            cmd.docker.run(args=("cp", svc + ":/share/tshark", out_p))
            # Flatten: move captures out of the copied tshark/ dir.
            cmd.mv(out_p / "tshark" // "*", out_p)
            cmd.rmdir(out_p / "tshark")
        except Exception:
            # If there are no tshark captures, do nothing.
            pass
def build(self): import sys # Don't do something when running non-interactive. if not sys.stdout.isatty(): return from plumbum import FG from pprof.utils.downloader import update_hash from logging import info from pprof.settings import config root = config["tmpdir"] src_file = self.src_file + ".new" with local.cwd(self.builddir): mkdir("-p", "pprof-src") w_pprof_src = uchroot("-m", "{}:pprof-src".format(config[ "sourcedir"])) pip_in_uchroot = w_pprof_src["/usr/bin/pip3"] pip_in_uchroot["install", "--upgrade", "/pprof-src/"] & FG tgt_path = path.join(root, self.src_file) tgt_path_new = path.join(root, src_file) tar("cjf", tgt_path_new, ".") update_hash(src_file, root) mv(path.join(root, src_file), tgt_path)
def configure(self):
    """Write portage configuration files into the container build dir.

    Creates make.conf pointing at the in-container llvm/pprof toolchain,
    a gentoo layout.conf, and copies the host's resolv.conf so the
    container has working DNS.
    """
    with local.cwd(self.builddir):
        with open("etc/portage/make.conf", 'w') as makeconf:
            # NOTE(review): reconstructed heredoc — confirm the original
            # line layout of this literal against the upstream file.
            lines = '''
PATH="/llvm/bin:/pprof/bin:${PATH}"
LD_LIBRARY_PATH="/llvm/lib:/pprof/lib:${LD_LIBRARY_PATH}"
CFLAGS="-O2 -pipe"
CXXFLAGS="${CFLAGS}"
FEATURES="-sandbox -usersandbox -usersync -xattr"
CC="/llvm/bin/clang"
CXX="/llvm/bin/clang++"
PORTAGE_USERNAME = "******"
PORTAGE_GRPNAME = "root"
PORTAGE_INST_GID = 0
PORTAGE_INST_UID = 0
CHOST="x86_64-pc-linux-gnu"
USE="bindist mmx sse sse2"
PORTDIR="/usr/portage"
DISTDIR="${PORTDIR}/distfiles"
PKGDIR="${PORTDIR}/packages"
'''
            makeconf.write(lines)
        mkdir("-p", "etc/portage/metadata")
        with open("etc/portage/metadata/layout.conf", 'w') as layoutconf:
            lines = '''masters = gentoo'''
            layoutconf.write(lines)
        # DNS inside the chroot.
        cp("/etc/resolv.conf", "etc/resolv.conf")
def configure_openmp(self, openmp_path):
    """ Configure LLVM/Clang's own OpenMP runtime.

    Runs cmake in ``<openmp_path>/build``. On the first run (no
    CMakeCache.txt) the full configure with compiler selection is done;
    afterwards only a reconfigure of the existing cache ("." source).

    Args:
        openmp_path: Checkout directory of the OpenMP runtime.
    """
    from plumbum.cmd import cmake
    with local.cwd(openmp_path):
        builddir = os.path.join(openmp_path, "build")
        if not os.path.exists(builddir):
            mkdir(builddir)
        with local.cwd(builddir):
            cmake_cache = os.path.join(builddir, "CMakeCache.txt")
            install_path = os.path.join(self._builddir, "install")
            openmp_cmake = cmake[
                "-DCMAKE_INSTALL_PREFIX=" + install_path,
                "-DCMAKE_BUILD_TYPE=Release",
                "-DCMAKE_USE_RELATIVE_PATHS=On",
                "-DLIBOMP_ENABLE_ASSERTIONS=Off"]
            if self._use_make:
                openmp_cmake = openmp_cmake["-G", "Unix Makefiles"]
            else:
                openmp_cmake = openmp_cmake["-G", "Ninja"]
            if not os.path.exists(cmake_cache):
                # Fresh build dir: pick the compiler and point cmake at
                # the source tree.
                openmp_cmake = configure_compiler(openmp_cmake,
                                                  use_gcc=False)
                openmp_cmake = openmp_cmake[openmp_path]
            else:
                # Existing cache: just reconfigure in place.
                openmp_cmake = openmp_cmake["."]
            openmp_cmake()
def mount_via_cron(device, volume_id):
    """Arrange for *device* to be mounted by the host's cron, then wait.

    NOTE(review): near-duplicate of the other mount_via_cron in this
    file — consider consolidating. Writes a one-shot mount script into
    the host's run-parts directory (via /host_root) and blocks until the
    marker file appears (up to ~2 minutes).

    Raises:
        NotMountedException: via retry exhaustion if the mount never appears.
    """
    logger.info("Ensuring cron mount")
    # Host-visible paths vs. the same paths via the /host_root bind mount.
    mount_script = "/volumes/automount/%s" % volume_id
    mount_script_inner = "/host_root%s" % mount_script
    mounted_mark = "/volumes/automount/.mounted-%s" % volume_id
    mounted_mark_inner = "/host_root%s" % mounted_mark
    if not local.path('/host_root/volumes/automount').exists():
        # First time: create the run-parts dir and register it in crontab.
        mkdir('-p', "/host_root/volumes/automount")
        sh(
            '-c',
            "echo '* * * * * root cd / && run-parts --report /volumes/automount' >> /host_root/etc/crontab"
        )
    mkdir('-p', "/host_root/volumes/%s" % volume_id)
    # The script deletes itself after a successful mount (one-shot).
    local.path(mount_script_inner).write("""#!/usr/bin/env sh
set -e
mount {device} /volumes/{volume_id}
touch {mounted_mark}
rm {mount_script}
""".format(**locals()))
    chmod('+x', "/host_root%s" % mount_script)

    @retry(wait_fixed=2000, stop_max_attempt_number=60)
    def wait_for_mount():
        # Poll for the marker touched by the mount script; consume it.
        logger.info("Waiting for mount")
        if local.path(mounted_mark_inner).exists():
            local.path(mounted_mark_inner).delete()
            return True
        else:
            raise NotMountedException()

    wait_for_mount()
def configure_llvm(self, llvm_path):
    """ Configure LLVM and all subprojects.

    Runs cmake in ``<llvm_path>/build`` with the polly/polli options and
    the papi/likwid/isl locations wired in. First run (no CMakeCache.txt)
    selects the compiler and points cmake at the source tree; later runs
    reconfigure the existing cache.

    Args:
        llvm_path: LLVM checkout directory.
    """
    with local.cwd(llvm_path):
        builddir = os.path.join(llvm_path, "build")
        if not os.path.exists(builddir):
            mkdir(builddir)
        with local.cwd(builddir):
            cmake_cache = os.path.join(builddir, "CMakeCache.txt")
            install_path = os.path.join(self._builddir, "install")
            llvm_cmake = cmake[
                "-DCMAKE_INSTALL_PREFIX=" + install_path,
                "-DCMAKE_BUILD_TYPE=Release",
                "-DBUILD_SHARED_LIBS=Off",
                "-DCMAKE_USE_RELATIVE_PATHS=On",
                "-DPOLLY_BUILD_POLLI=On",
                "-DLLVM_TARGETS_TO_BUILD=X86",
                "-DLLVM_BINUTILS_INCDIR=/usr/include/",
                "-DLLVM_ENABLE_PIC=On",
                "-DLLVM_ENABLE_ASSERTIONS=On",
                "-DCLANG_DEFAULT_OPENMP_RUNTIME=libomp",
                "-DCMAKE_CXX_FLAGS_RELEASE='-O3 -DNDEBUG -fno-omit-frame-pointer'"]
            if self._use_make:
                llvm_cmake = llvm_cmake["-G", "Unix Makefiles"]
            else:
                llvm_cmake = llvm_cmake["-G", "Ninja"]
            # Wire in optional measurement/support libraries.
            llvm_cmake = configure_papi(llvm_cmake, self._papidir)
            llvm_cmake = configure_likwid(llvm_cmake, self._likwiddir)
            llvm_cmake = configure_isl(llvm_cmake, self._isldir)
            if not os.path.exists(cmake_cache):
                llvm_cmake = configure_compiler(llvm_cmake, self._use_gcc)
                llvm_cmake = llvm_cmake[llvm_path]
            else:
                llvm_cmake = llvm_cmake["."]
            llvm_cmake()
def main(self, awsId, awsSecret, awsRegion): mkdir("-p", CONFIG_DIR) # save aws credentials credentials = awsCredentials(id = awsId, secret = awsSecret) ((cat << credentials) > CONFIG_DIR + "/credentials")() # save aws region config config = awsConfig(region = awsRegion) ((cat << config) > CONFIG_DIR + "/config")()
def collect_logs(self, out_dir: str = "logs/docker"):
    """Collects the logs from the services into the given directory"""
    log_dir = plumbum.local.path(out_dir)
    cmd.mkdir("-p", log_dir)
    services = self("config", "--services").splitlines()
    for service in services:
        # One log file per compose service, redirected from docker-compose.
        log_path = log_dir / "%s.log" % service
        logs_cmd = cmd.docker_compose["-f", self.compose_file,
                                      "-p", self.project,
                                      "--no-ansi", "logs", service]
        (logs_cmd > log_path)()
def collect_logs(self, out_dir: str = 'logs/docker'):
    """Collects the logs from the services into the given directory"""
    log_dir = local.path(out_dir)
    mkdir('-p', log_dir)
    services = self('config', '--services').splitlines()
    for service in services:
        # Fetch the service's logs and persist them to <out_dir>/<svc>.log.
        log_path = log_dir / '%s.log' % service
        with open(log_path, 'w') as fh:
            fh.write(self('logs', service))
def _update_dir(self, path, clear_dir=False):
    """Create *path* if missing; optionally delete all symlinks inside it."""
    if not path.exists():
        logger.debug('mkdir %s' % path)
        mkdir(path)
    if not clear_dir:
        return
    # Remove every symlink directly or transitively under the directory.
    with local.cwd(path):
        logger.debug('rm all links from %s' % path)
        find['.']['-type', 'l']['-delete']()
def extract(t, source, dest):
    """Extract tar member *source* from tarfile *t* to path *dest*.

    No-op when *dest* already exists. Parent directories are created
    as needed.

    Args:
        t: An open ``tarfile.TarFile``.
        source: Member name/TarInfo to extract.
        dest: Destination path (plumbum-style: has .exists()/.dirname).
    """
    if dest.exists():
        return
    dest_dir = dest.dirname
    mkdir('-p', dest_dir)
    source_file = t.extractfile(source)
    try:
        # FIX: extractfile() returns a *binary* stream; writing its bytes
        # into a text-mode ("w") file raises TypeError on Python 3 —
        # open the destination in binary mode instead.
        with open(str(dest), "wb") as dest_file:
            dest_file.write(source_file.read())
    finally:
        # Close the member stream even if the write fails.
        source_file.close()
def prepare_directories(dirs):
    """
    Make sure that the required directories exist.

    Args:
        dirs - the directories we want.
    """
    for wanted in dirs:
        # retcode=None: already-existing directories are not an error.
        mkdir("-p", wanted, retcode=None)
def prepare(self): """ Prepare the experiment. This includes creation of a build directory and setting up the logging. Afterwards we call the prepare method of the project. """ if not path.exists(self.builddir): mkdir(self.builddir, retcode=None) self.map_projects(self.prepare_project, "prepare")
def collect_logs(self, out_dir: str = "logs/docker"):
    """Collects the logs from the services into the given directory"""
    log_dir = plumbum.local.path(out_dir)
    cmd.mkdir("-p", log_dir)
    for service in self("config", "--services").splitlines():
        log_path = log_dir / "%s.log" % service
        # Dump docker logs (stderr folded into stdout); retcode=None so a
        # missing/stopped container does not abort collection.
        with open(log_path, "w") as fh:
            cmd.docker.run(args=("logs", service),
                           stdout=fh,
                           stderr=subprocess.STDOUT,
                           retcode=None)
def main(self):
    """Entry point of the pprof runner.

    Handles the list/show-config modes, optionally creates the build
    directory, then runs each requested experiment (clean -> prepare ->
    run).
    """
    from logging import getLogger, INFO
    from pprof.experiment import ExperimentRegistry
    project_names = self._project_names
    group_name = self._group_name
    root = getLogger()
    root.setLevel(INFO)

    if self._list_experiments:
        for exp_name in ExperimentRegistry.experiments:
            exp_cls = ExperimentRegistry.experiments[exp_name]
            print(exp_cls.NAME)
            docstring = exp_cls.__doc__ or "-- no docstring --"
            print(("    " + docstring))
        exit(0)

    if self._list:
        for exp_name in self._experiment_names:
            exp_cls = ExperimentRegistry.experiments[exp_name]
            exp = exp_cls(self._project_names, self._group_name)
            print_projects(exp)
        exit(0)

    if (self.show_config):
        from pprof.settings import print_settings
        print_settings(config)
        exit(0)

    if self._project_names:
        # Only try to create the build dir if we're actually running some projects.
        builddir = os.path.abspath(config["builddir"])
        if not os.path.exists(builddir):
            response = query_yes_no(
                "The build directory {dirname} does not exist yet. Create it?".format(
                    dirname=builddir), "no")
            if response:
                mkdir("-p", builddir)

    for exp_name in self._experiment_names:
        print("Running experiment: " + exp_name)
        if exp_name in ExperimentRegistry.experiments:
            exp_cls = ExperimentRegistry.experiments[exp_name]
            exp = exp_cls(project_names, group_name)
            exp.clean()
            exp.prepare()
            exp.run()
        else:
            from logging import error
            # FIX: logging uses %-style lazy formatting; the original "{}"
            # placeholder was never interpolated.
            error("Could not find %s in the experiment registry.", exp_name)
def collect_logs(self, out_dir: str = 'logs/docker'):
    """Collects the logs from the services into the given directory.

    Uses docker-compose with BASE_DIR/COMPOSE_FILE exported so the
    compose project resolves correctly; compose's stderr chatter is
    redirected to stdout to keep the captured log files clean.
    """
    out_p = local.path(out_dir)
    mkdir('-p', out_p)
    for svc in self('config', '--services').splitlines():
        dst_f = out_p / '%s.log' % svc
        with local.env(BASE_DIR=self.base_dir,
                       COMPOSE_FILE=self.compose_file):
            with redirect_stderr(sys.stdout):
                (docker_compose['-p', 'acceptance_scion', '--no-ansi',
                                'logs', svc] > dst_f)()
def refresh_dir(dir, cleanup=True, cleanup_anyways=False):
    """Context-manager body: run the with-block inside a freshly wiped *dir*.

    Recreates *dir*, cds into it, yields to the caller, and removes the
    directory afterwards according to the cleanup flags. Presumably
    decorated with @contextmanager at its real definition site — TODO
    confirm.

    Args:
        dir: Directory to recreate and work in (shadows the builtin).
        cleanup: Remove the dir after the block completes successfully.
        cleanup_anyways: Also remove it when the block raises.
    """
    rm('-rf', dir)
    mkdir('-p', dir)
    with local.cwd(dir):
        try:
            yield
            if cleanup:
                rm('-rf', dir)
        # Bare except is deliberate here: the exception is always re-raised,
        # this clause only decides whether to clean up first.
        except:
            if cleanup_anyways:
                rm('-rf', dir)
            raise
def configure(self):
    """Configure tcc in an out-of-tree ``build`` dir with wrapped clang."""
    from benchbuild.utils.compiler import lt_clang
    from benchbuild.utils.run import run
    from plumbum.cmd import mkdir
    tcc_dir = path.join(self.builddir, self.src_dir)
    with local.cwd(self.builddir):
        mkdir("build")
        # Compiler wrapper must be created inside the build dir.
        clang = lt_clang(self.cflags, self.ldflags,
                         self.compiler_extension)
        with local.cwd(path.join(self.builddir, "build")):
            configure = local[path.join(tcc_dir, "configure")]
            run(configure["--cc=" + str(clang), "--libdir=/usr/lib64"])
def setup_prepare(self): """Unpacks the topology and loads local docker images. """ # Delete old artifacts, if any. cmd.rm("-rf", self.test_state.artifacts) cmd.mkdir(self.test_state.artifacts) print('artifacts dir: %s' % self.test_state.artifacts) self._unpack_topo() print(cmd.docker('image', 'load', '-i', self.test_state.containers_tar)) # Define where coredumps will be stored. print( cmd.docker("run", "--rm", "--privileged", "alpine", "sysctl", "-w", "kernel.core_pattern=/share/coredump"))
def test_md5_hasher(builddir):
    """md5_hasher: files hash by content, dirs/symlinks by name, missing by name."""
    with local.cwd(builddir):
        # Fixture: plain file, dir with a file, and three kinds of symlink.
        sh.touch('testfile')
        sh.mkdir('testdir')
        sh.touch('testdir/testfile')
        sh.ln('-s', 'testdir', 'testdirlink')
        sh.ln('-s', 'testfile', 'testlink')
        sh.ln('-s', 'nofile', 'testlink_nofile')
        assert md5_hasher('nofile') == None
        assert md5_hasher('testfile') == EMPY_FILE_MD5
        # Directories hash their *name*, not their contents.
        assert md5_hasher('testdir') == md5func('testdir').hexdigest()
        # A link to a file hashes the file's content ...
        assert md5_hasher('testlink') == EMPY_FILE_MD5
        # ... while links to dirs / missing targets hash the target name.
        assert md5_hasher('testdirlink') == md5func('testdir').hexdigest()
        assert md5_hasher('testlink_nofile') == md5func('nofile').hexdigest()
def setup_prepare(self): """Unpacks loads local docker images and generates the topology. """ # Delete old artifacts, if any. cmd.rm("-rf", self.test_state.artifacts) cmd.mkdir(self.test_state.artifacts) print("artifacts dir: %s" % self.test_state.artifacts) for tar in self.test_state.containers_tars: print(cmd.docker("image", "load", "-i", tar)) # Define where coredumps will be stored. print( cmd.docker("run", "--rm", "--privileged", "alpine", "sysctl", "-w", "kernel.core_pattern=/share/coredump")) self._setup_generate()
def uchroot(*args, **kwargs):
    """
    Returns a uchroot command which can be called with other args to be
    executed in the uchroot.

    Args:
        args: List of additional arguments for uchroot (typical: mounts)
    Return:
        chroot_cmd
    """
    from benchbuild.settings import CFG
    mkdir("-p", "llvm")
    # FIX: the original passed the args tuple and kwargs dict as two
    # positional arguments; they must be unpacked through to the helper.
    uchroot_cmd = uchroot_no_llvm(*args, **kwargs)
    # Mount the configured LLVM installation into the chroot.
    uchroot_cmd = uchroot_cmd["-m", str(CFG["llvm"]["dir"]) + ":llvm"]
    uchroot_cmd = uchroot_cmd.setenv(LD_LIBRARY_PATH="/llvm/lib")
    return uchroot_cmd["--"]
def parse_table(name):
    """Download a day's S3 logs for *name*, count ERROR codes, alert on excess.

    Python 2 code (print statements). Syncs the remote logs to a temp
    dir, unzips them, tallies field 8 of each line, and sends a
    notification for any error code whose count exceeds its allowance
    in ``checks``. The temp dir is removed afterwards.
    """
    log('Starting parsing for "{}" table.'.format(name))
    url = 's3://sva.s2-new.opendsp.com/user=man/table={}/{}/'.format(name, s3date)
    log('Using remote path: {}'.format(url))
    # Unique-ish scratch dir per invocation (timestamp-based).
    directory = '{}/{}'.format(temp, time())
    log('Using local path: {}'.format(directory))
    mkdir('-p', directory)
    log('Directory "{}" has been created.'.format(directory))
    log('Downloading logs.')
    print s3cmd('sync', url, directory)
    log('Unzipping logs.')
    print gunzip('-rv', directory)
    all_logs = local.path(directory) // 'man.{}.*.log'.format(name)
    log('Available logs:\n{}'.format('\n'.join(all_logs)))
    # Histogram of column 8 (status) across all downloaded logs.
    stdout = (cut['-f8', all_logs] | sort | uniq['-c'])()
    log('Status from log:\n{}'.format(stdout))
    errors = finditer(r'(?P<amount>\d+)\s+ERROR:\s+(?P<code>\d+)', stdout)
    log('Parsing lines.')
    for err in errors:
        code = int(err.group('code'))
        amount = int(err.group('amount'))
        # NOTE(review): checks[code] is read before the `code in checks`
        # test below — an unknown code raises KeyError here; verify that
        # every code appearing in logs has an allowance configured.
        allowed = checks[code]
        log('Code {} -> Amount {} | Allowed {}'.format(code, amount, allowed))
        if code in checks and amount > allowed:
            error = 'Date: {}. File: "man.{}.enr.log". Error Code {}: amount {} exceeded allowed {}.' \
                    ''.format(date.strftime('%Y/%m/%d'), name, code, amount, allowed)
            log(error)
            send_notification(error)
        else:
            log('Everything is ok.')
    log('Removing "{}" directory.'.format(directory))
    rm('-rf', directory)
    log('Done for "{}" table.'.format(name))
    print '\n'
def createBuildFolderAndChDir(libDir, clockName):
    """Create ./build, chdir into it and generate build files from templates."""
    cwd = local.cwd
    from plumbum.cmd import mkdir, rm
    # Create the build directory and make it the working directory.
    mkdir("-p", "build")
    build_dir = cwd / "build"
    cwd.chdir(build_dir)
    # Render the build files from the library's template directory.
    import generate_buildfiles
    template_dir = libDir / "templates"
    generate_buildfiles.make_files(str(build_dir), str(template_dir), clockName)
def testBuild(target='lyte.py', fileNames=['lyte.py', 'pyonly.py']):
    """Copy *fileNames* into a timestamped test dir and run *target* there.

    Side effects: mutates the module-level globals ``testDirName`` and
    ``fileName`` and changes the process working directory (left inside
    the test dir on return).

    Note: the mutable default for fileNames is only safe because it is
    never modified in place.
    """
    local.cwd.chdir(initialDir)
    global testDirName, fileName
    # Timestamp suffix keeps repeated runs from colliding.
    testDirName = target.replace('.py', '') + timestamp
    print(f'\n\nTESTBUILD: making {testDirName}')
    mkdir(testDirName)
    for fileName in fileNames:
        print(f'TESTBUILD: copying {fileName} -> {testDirName}/{fileName}')
        copy(fileName, f'{testDirName}/{fileName}')
    local.cwd.chdir(testDirName)
    print(pwd())
    print(f'TESTBUILD: running ipython3 {testDirName}/{target}\n')
    ipython3 = local['ipython3']
    ipython3(target)
def configure(self):
    """Write portage/wget config into the chroot and wrap the compilers.

    Copies the host resolv.conf for DNS and the benchbuild config file
    (if present), then installs uchroot compiler wrappers pointing at
    /llvm/bin.
    """
    with local.cwd(self.builddir):
        self.write_bashrc("etc/portage/bashrc")
        self.write_makeconfig("etc/portage/make.conf")
        self.write_wgetrc("etc/wgetrc")
        mkdir("-p", "etc/portage/metadata")
        self.write_layout("etc/portage/metadata/layout.conf")
        # DNS inside the chroot.
        cp("/etc/resolv.conf", "etc/resolv.conf")
        config_file = CFG["config_file"].value()
        if path.exists(str(config_file)):
            cp(config_file, path.basename(config_file))
    wrap_cc_in_uchroot(self.cflags, self.ldflags,
                       self.compiler_extension, "/llvm/bin")
    wrap_cxx_in_uchroot(self.cflags, self.ldflags,
                        self.compiler_extension, "/llvm/bin")
def main(self):
    """Create the build dir (after confirmation) and run the Empty experiment
    on the gentoo stage3 project."""
    # Only try to create the build dir if we're actually running some projects.
    builddir = os.path.abspath(config["builddir"])
    if not os.path.exists(builddir):
        # Default answer is "no": never create directories silently.
        response = query_yes_no(
            "The build directory {dirname} does not exist yet. Create it?".format(
                dirname=builddir), "no")
        if response:
            mkdir("-p", builddir)
    # Imports trigger project/experiment registration side effects.
    from pprof.project import ProjectRegistry
    from pprof.projects.gentoo import gentoo
    from pprof.experiments import empty
    exp = empty.Empty(["stage3"])
    exp.clean()
    exp.prepare()
    exp.run()
def configure(self):
    """Autoconf + configure SpiderMonkey in a build_OPT.OBJ dir with wrapped clang."""
    from pprof.utils.compiler import lt_clang, lt_clang_cxx
    from pprof.utils.run import run
    from plumbum.cmd import mkdir
    js_dir = path.join(self.builddir, self.src_dir, "js", "src")
    with local.cwd(self.builddir):
        # Compiler wrappers must be created in the build dir.
        clang = lt_clang(self.cflags, self.ldflags,
                         self.compiler_extension)
        clang_cxx = lt_clang_cxx(self.cflags, self.ldflags,
                                 self.compiler_extension)
    with local.cwd(js_dir):
        # Regenerate configure from configure.in first.
        autoconf = local["autoconf"]
        autoconf()
        mkdir("build_OPT.OBJ")
        with local.cwd("build_OPT.OBJ"):
            with local.env(CC=str(clang), CXX=str(clang_cxx)):
                configure = local["../configure"]
                run(configure)
def main(self):
    """Clone/update LLVM + clang/polly/polli/openmp, configure and build all.

    The LLVM build is installed first because the OpenMP runtime build
    depends on the freshly installed toolchain.
    """
    print("Building in: {0:s}".format(self._builddir))
    if not os.path.exists(self._builddir):
        mkdir(self._builddir)
    llvm_path = os.path.join(self._builddir, "benchbuild-llvm")
    openmp_path = os.path.join(self._builddir, "openmp-runtime")
    with local.cwd(self._builddir):
        clone_or_pull(CFG['repo']['llvm'], llvm_path)
        # clang and polly live under llvm/tools; polli under polly/tools.
        tools_path = os.path.join(llvm_path, "tools")
        with local.cwd(tools_path):
            clone_or_pull(CFG['repo']['clang'],
                          os.path.join(tools_path, "clang"))
            clone_or_pull(CFG['repo']['polly'],
                          os.path.join(tools_path, "polly"))
            polli_path = os.path.join(tools_path, "polly", "tools")
            with (local.cwd(polli_path)):
                clone_or_pull(CFG['repo']['polli'],
                              os.path.join(polli_path, "polli"))
        clone_or_pull(CFG['repo']['openmp'], openmp_path)
        self.configure_llvm(llvm_path)
        self.configure_openmp(openmp_path)

        build_cmd = None
        if self._use_make:
            build_cmd = local["make"]
        else:
            build_cmd = local["ninja"]
        if self._num_jobs:
            build_cmd = build_cmd["-j", self._num_jobs]

        print("Building LLVM.")
        build_llvm = build_cmd["-C", os.path.join(llvm_path, "build"),
                               "install"]
        build_llvm()
        print("Building OpenMP.")
        build_openmp = build_cmd["-C", os.path.join(openmp_path, "build"),
                                 "install"]
        build_openmp()
def test_symlink(builddir, runner):
    """Build/clean cycle: symlink outputs are tracked in .deps and cleaned."""
    builder = BuildFile(build_dir=builddir, runner=runner)
    with local.cwd(builddir):
        # Fixture: one real file and one real dir for links to point at.
        sh.touch('testfile')
        sh.mkdir('testdir')

    ###### First build ##########
    builder.main(command_line=['-D', 'build'])
    # Each ln invocation is recorded with its symlink as an output;
    # "output-" marks outputs whose hash is not checked.
    expected_json = {
        ".deps_version": 2,
        "ln -s nofile testlink_nofile": {
            "testlink_nofile": "output-"
        },
        "ln -s testdir testlink_dir": {
            "testlink_dir": "output-"
        },
        "ln -s testfile testlink": {
            "testlink": "output-"
        }
    }
    # assertions
    with local.cwd(builddir):
        assert_same_json('.deps', expected_json)
        assert os.path.islink('testlink')
        assert os.path.realpath('testlink').endswith('/testfile')
        assert os.path.islink('testlink_dir')
        assert os.path.realpath('testlink_dir').endswith('/testdir')
        # A dangling symlink is still a link, but not a file.
        assert os.path.islink('testlink_nofile')
        assert not os.path.isfile('testlink_nofile')
        sys.exit.assert_called_once_with(0)

    ###### Cleaning ##########
    builder.main(command_line=['-D', 'clean'])
    with local.cwd(builddir):
        # Clean removes the deps database and every created link.
        assert not os.path.isfile('.deps')
        assert not os.path.islink('testlink')
        assert not os.path.islink('testlink_dir')
        assert not os.path.islink('testlink_nofile')
def install_uchroot(self):
    """Clone/update and build erlent, then put its build dir on PATH.

    Exits the process with -1 if the resulting ``uchroot`` binary cannot
    be found afterwards. Also records the path in the benchbuild config
    lookup path.
    """
    builddir = settings.CFG["build_dir"].value()
    with local.cwd(builddir):
        if not os.path.exists("erlent/.git"):
            git("clone", "[email protected]:PolyJIT/erlent")
        else:
            with local.cwd("erlent"):
                git("pull", "--rebase")
        mkdir("-p", "erlent/build")
        with local.cwd("erlent/build"):
            from plumbum.cmd import cmake, make, cp
            cmake("../")
            make()
    erlent_path = os.path.abspath(os.path.join(builddir, "erlent", "build"))
    # Prepend the build dir to PATH for both os.environ and plumbum's env.
    os.environ["PATH"] = os.path.pathsep.join([erlent_path,
                                               os.environ["PATH"]])
    local.env.update(PATH=os.environ["PATH"])
    if not find_package("uchroot"):
        sys.exit(-1)
    settings.CFG["env"]["lookup_path"].value().append(erlent_path)
def handleSystemExt4Image(self, systemImageFilename):
    """Mount a system ext4 image and report Broadcom firmware files in it.

    Python 2 code (print statements). Mounts the image read into
    ./mounted-image, finds files matching self.broadcomPattern, prints
    each firmware's size and last `strings` line (presumably a version
    string — TODO confirm), then unmounts and removes the mount point.
    """
    logger.debug('Searching for files which match pattern "%s"' %
                 self.broadcomPattern)
    directory = './mounted-image'
    mkdir(directory)
    mount(systemImageFilename, directory)
    firmwares = find(directory, '-iname', self.broadcomPattern).splitlines()
    for firmware in firmwares:
        print 'Found firmware: %s (Size: %d)' % (firmware, os.path.getsize(firmware))
        # Last printable string in the blob.
        versionCommand = strings[firmware] | tail['-1']
        print versionCommand()
    if not firmwares:
        print 'No firmwares found.'
    umount(directory)
    rm('-rf', directory)
def test_webServer():
    """kill the server if it's alive then start it and point it to a randomly
    named directory. To prove its in the randomly named directory, put a
    randomly-named file in it it and see if it shows up in the server.
    """
    try:
        killServer(8081)
    # NOTE(review): bare except deliberately ignores "no server running";
    # consider narrowing to Exception.
    except:
        pass
    import random
    randomDirName = str(random.randint(10000, 99999))
    mkdir(randomDirName)
    randomFileName = str(random.randint(10000, 99999))
    touch(f'{randomDirName}/{randomFileName}')  #create file in local directory
    webServer(randomDirName)
    # Give the server a moment to start before probing it.
    sleep(1)
    assert randomFileName in requests.get(
        f'http://localhost:8081/{randomDirName}').text
    delete(randomDirName)
def main(self):
    """Generate one LLVM-lit regression test file per RegressionTest row.

    Each test gets a RUN line with the current opt flags, a CHECK line,
    and the stored module; a sed pass then strips the first attribute
    group reference.
    """
    from benchbuild.utils.schema import Session, RegressionTest
    from plumbum.cmd import mkdir, sed
    prefix = CFG["regression-prefix"]
    if not os.path.exists(prefix):
        mkdir("-p", prefix)

    session = Session()
    for elem in session.query(RegressionTest).order_by(
            RegressionTest.project_name):
        # One subdirectory per project.
        sub_dir = os.path.join(prefix, elem.project_name)
        if not os.path.exists(sub_dir):
            mkdir("-p", sub_dir)
        test_path = os.path.join(sub_dir, elem.name + ".ll")
        with open(test_path, 'w') as test_f:
            test_f.write("""
; RUN: opt {opt_flags} < %s 2>&1 | FileCheck %s
""".format(opt_flags=" ".join(self.opt_flags())))
            test_f.write(self.get_check_line(test_path, elem.module))
            test_f.write(elem.module)
        # Strip the first "#0" attribute-group reference in the file.
        (sed["-i", r"0,/\#0/s///", test_path])()
def main(self, *args):
    """Configure logging from verbosity, then ensure the build dir exists.

    Verbosity 0..3 maps to ERROR..DEBUG. The build directory is only
    created after an interactive confirmation.
    """
    log.configure()
    LOG = logging.getLogger()
    LOG.setLevel({
        3: logging.DEBUG,
        2: logging.INFO,
        1: logging.WARNING,
        0: logging.ERROR
    }[self.verbosity])

    settings.update_env()
    builddir = os.path.abspath(settings.CFG["build_dir"].value())
    if not os.path.exists(builddir):
        response = ask("The build directory {dirname} does not exist yet. "
                       "Should I create it?".format(dirname=builddir))
        if response:
            mkdir("-p", builddir)
            print("Created directory {0}.".format(builddir))
    setup_directories(builddir)
def test_makeHTMLdir():
    """makeHTMLdir copies the named support scripts into <dir>/supportScripts."""
    HTMLdirName = '123'
    delete(HTMLdirName)
    # Fixture: a fake support-scripts dir containing two scripts.
    fakeSSname = 'fakeSupportScripts'
    delete(fakeSSname)
    mkdir(fakeSSname)
    scriptNames = ['xyz.test', 'xyz2.test']
    for scriptName in scriptNames:
        touch(f'{fakeSSname}/{scriptName}')
    makeHTMLdir(HTMLdirName, stacheDir=fakeSSname, GLOWPATH='.',
                scriptNames=scriptNames)
    assert('supportScripts' in ls(HTMLdirName).split())
    # FIX: the directory name was hard-coded as '123' here, silently
    # coupling the assertion to the constant above; use HTMLdirName.
    assert(ls(f'{HTMLdirName}/supportScripts').split() == scriptNames)
    delete(HTMLdirName)
    delete(fakeSSname)
def main(self, srcDir, dstBucket, dstDir):
    """Date-stamp new backup files, prune old ones, sync the rest to S3.

    New non-empty files in srcDir are moved into srcDir/_processed with
    a YYYY-MM-DD- prefix; only the newest ``--keep`` files are retained,
    and the processed dir is mirrored (with --delete) to the bucket.
    """
    # protect to prevent deleting of all backups
    if self.keepCount < 2:
        self.keepCount = 2

    s3DirPath = "s3://" + dstBucket + "/" + dstDir
    if self.verbose:
        print("Sending backups from", srcDir, "to", s3DirPath, flush = True)

    # check if bucket exists and create if not
    lines = (s3["ls"] | grep[dstBucket])().splitlines()
    if not lines:
        if self.verbose:
            print("Bucket doesn't exist. Creating...")
        (s3["mb", "s3://" + dstBucket] > stdout)()

    # create dir for processed backup files (if not exists)
    processedDir = join(srcDir, "_processed")
    mkdir("-p", processedDir)

    # process new files
    for f in listdir(srcDir):
        fullPath = join(srcDir, f)
        # Skip directories and empty (failed/in-progress) backups.
        if isfile(fullPath) and getsize(fullPath) > 0:
            datePrefix = time.strftime("%Y-%m-%d-",
                                       time.localtime(getctime(fullPath)))
            processedFileName = datePrefix + f
            mv(fullPath, join(processedDir, processedFileName))

    # remove old backups, keep only requested count (--keep)
    # ls -c sorts newest-first, so entries past keepCount are the oldest.
    for f in ls("-c", processedDir).splitlines()[self.keepCount:]:
        if self.verbose:
            print("Removing old backup", f, flush = True)
        rm(join(processedDir, f))

    # sync to s3
    (s3["sync", processedDir, s3DirPath,
        "--storage-class", "STANDARD_IA", "--delete"] > stdout)()
def main(self):
    """Clone/update LLVM + clang/polly/polli/openmp, configure and build all.

    pprof variant of the bootstrap: repositories come from the ``config``
    dict and builds stream output to the terminal via & FG.
    """
    print("Building in: {:s}".format(self._builddir))
    if not os.path.exists(self._builddir):
        mkdir(self._builddir)
    llvm_path = os.path.join(self._builddir, "pprof-llvm")
    openmp_path = os.path.join(self._builddir, "openmp-runtime")
    with local.cwd(self._builddir):
        clone_or_pull(config["llvm_repo"], llvm_path)
        # clang and polly live under llvm/tools; polli under polly/tools.
        tools_path = os.path.join(llvm_path, "tools")
        with local.cwd(tools_path):
            clone_or_pull(config["clang_repo"],
                          os.path.join(tools_path, "clang"))
            clone_or_pull(config["polly_repo"],
                          os.path.join(tools_path, "polly"))
            polli_path = os.path.join(tools_path, "polly", "tools")
            with (local.cwd(polli_path)):
                clone_or_pull(config["polli_repo"],
                              os.path.join(polli_path, "polli"))
        clone_or_pull(config["openmp_repo"], openmp_path)
        self.configure_llvm(llvm_path)
        self.configure_openmp(openmp_path)

        build_cmd = None
        if self._use_make:
            build_cmd = local["make"]
        else:
            build_cmd = local["ninja"]
        if self._num_jobs:
            build_cmd = build_cmd["-j", self._num_jobs]
        build_cmd["-C", os.path.join(llvm_path, "build"), "install"] & FG
        build_cmd["-C", os.path.join(openmp_path, "build"), "install"] & FG
def prepare(self):
    """Create the build directory if it does not exist yet."""
    if path.exists(self.builddir):
        return
    mkdir(self.builddir)
def cmd_setup(self):
    """Ensure the artifacts directory exists (creating parents as needed)."""
    artifacts_dir = self.artifacts
    cmd.mkdir("-p", artifacts_dir)
def _setup_artifacts(self):
    """Recreate a clean artifacts directory and report where it lives."""
    artifacts = self.artifacts
    # Wipe leftovers from any previous run before recreating the dir.
    cmd.rm("-rf", artifacts)
    cmd.mkdir(artifacts)
    print("artifacts dir: %s" % artifacts)
def copy_file(src, dst):
    """Copy *src* to *dst*, creating dst's parent directories as needed.

    ``cp -L`` dereferences symlinks, so the destination is always a
    regular file even when *src* is a link.
    """
    cmd.mkdir("-p", os.path.dirname(dst))
    cmd.cp("-L", src, dst)
def test_symlink(builddir, runner, end_fabricate):
    """End-to-end check that fabricate records symlink creation as
    outputs and removes the links again on clean."""
    # build.py content >>>>>>>>>>>>>>>>>>>>>
    def fabricate_file():
        def build():
            run('ln', '-s', 'testfile', 'testlink')
            run('ln', '-s', 'testdir', 'testlink_dir')
            run('ln', '-s', 'nofile', 'testlink_nofile')

        def clean():
            autoclean()
        return copy(locals())

    # Fixture targets: one regular file, one directory, and one link
    # target ('nofile') that deliberately does not exist.
    with local.cwd(builddir):
        sh.touch('testfile')
        sh.mkdir('testdir')

    ###### First build ##########
    main(globals_dict=fabricate_file(),
         #parallel_ok=True,
         build_dir=builddir, runner=runner, command_line=['-D', 'build'])
    end_fabricate()

    # Expected dependency DB: each ln command owns its link as an output.
    expected_json = {
        ".deps_version": 2,
        "ln -s nofile testlink_nofile": {
            "testlink_nofile": "output-"
        },
        "ln -s testdir testlink_dir": {
            "testlink_dir": "output-"
        },
        "ln -s testfile testlink": {
            "testlink": "output-"
        }
    }

    # assertions
    with local.cwd(builddir):
        assert_same_json('.deps', expected_json)
        assert os.path.islink('testlink')
        assert os.path.realpath('testlink').endswith('/testfile')
        assert os.path.islink('testlink_dir')
        assert os.path.realpath('testlink_dir').endswith('/testdir')
        # Dangling link: islink() is true but the target is missing.
        assert os.path.islink('testlink_nofile')
        assert not os.path.isfile('testlink_nofile')
    sys.exit.assert_called_once_with(0)

    ###### Cleaning ##########
    main(globals_dict=fabricate_file(),
         #parallel_ok=True,
         #jobs=4,
         build_dir=builddir, runner=runner, command_line=['-D', 'clean'])
    end_fabricate()

    # clean must drop the deps DB and every link (targets are untouched).
    with local.cwd(builddir):
        assert not os.path.isfile('.deps')
        assert not os.path.islink('testlink')
        assert not os.path.islink('testlink_dir')
        assert not os.path.islink('testlink_nofile')
def main(self):
    """Main entry point of benchbuild run.

    Handles the informational flags (--list-experiments, --list,
    show/store config), prepares the build directory, then assembles and
    runs one action tree per requested experiment.
    """
    project_names = self._project_names
    group_name = self._group_name
    experiments.discover()

    registry = experiment.ExperimentRegistry
    exps = registry.experiments

    # --list-experiments: dump every registered experiment with its doc.
    if self._list_experiments:
        for exp_name in registry.experiments:
            exp_cls = exps[exp_name]
            print(exp_cls.NAME)
            docstring = exp_cls.__doc__ or "-- no docstring --"
            print((" " + docstring))
        exit(0)

    # --list: show the projects each selected experiment would run.
    if self._list:
        for exp_name in self._experiment_names:
            exp_cls = exps[exp_name]
            exp = exp_cls(self._project_names, self._group_name)
            print_projects(exp)
        exit(0)

    if self.show_config:
        print(repr(CFG))
        exit(0)

    if self.store_config:
        config_path = ".benchbuild.json"
        CFG.store(config_path)
        print("Storing config in {0}".format(os.path.abspath(config_path)))
        exit(0)

    # Ensure the build directory exists; ask first when interactive,
    # default to creating it when stdin is not a TTY.
    if self._project_names:
        builddir = os.path.abspath(str(CFG["build_dir"]))
        if not os.path.exists(builddir):
            response = True
            if sys.stdin.isatty():
                response = ui.query_yes_no(
                    "The build directory {dirname} does not exist yet."
                    "Should I create it?".format(dirname=builddir), "no")
            if response:
                mkdir("-p", builddir)
                print("Created directory {0}.".format(builddir))

    # Build one Experiment action tree per requested experiment name.
    actns = []
    for exp_name in self._experiment_names:
        if exp_name in exps:
            exp_cls = exps[exp_name]
            exp = exp_cls(project_names, group_name)
            eactn = Experiment(exp, exp.actions())
            actns.append(eactn)
        else:
            from logging import error
            # BUG FIX: logging uses lazy %-style formatting; the original
            # "{}" placeholder was never substituted.
            error("Could not find %s in the experiment registry.", exp_name)

    num_actions = sum(len(x) for x in actns)
    print("Number of actions to execute: {}".format(num_actions))
    for a in actns:
        print(a)
    print()

    if not self.pretend:
        for a in actns:
            a()
def test_mkdir(builddir, runner):
    """fabricate must record mkdir outputs (and pre-existing directories
    as inputs) so that clean removes only what the build created."""
    # prepare needed files
    with local.cwd(builddir):
        sh.mkdir('existingdir')
        sh.touch('existingdir/existingfile')

    builder = BuildFile(build_dir=builddir, runner=runner)
    builder.main(command_line=['-D', 'build'])  #, parallel_ok=True)

    # Expected dependency DB; "input-" hashes mark directories that
    # existed before the command ran and must survive cleaning, while
    # "output-" hashes mark build products that clean may delete.
    expected_json = {
        ".deps_version": 2,
        "mkdir -p existingdir/a": {
            "existingdir": "input-ae394c47b4ccf49007dc9ec847f657b9",
            "existingdir/a": "output-16873f5a4ba5199a8b51f812d159e37e"
        },
        "mkdir -p testdir/c/d": {
            "testdir": "input-3ca0a3620b59afb57cf5fd77cee6432c",
            "testdir/c": "input-54a9057bcd619534a49f669dd5ed3078",
            "testdir/c/d": "input-fdb1b8414eeab993acc5623371c43a71"
        },
        "mkdir -p testdir/c/g": {
            "testdir": "input-3ca0a3620b59afb57cf5fd77cee6432c",
            "testdir/c": "input-54a9057bcd619534a49f669dd5ed3078",
            "testdir/c/g": "output-c512be1476c9253326e479827c491f7f"
        },
        "mkdir testdir": {
            "testdir": "output-3ca0a3620b59afb57cf5fd77cee6432c"
        },
        "mkdir testdir/a": {
            "testdir/a": "output-832651e32363cb4b115b074240cd08b5"
        },
        "mkdir testdir/b": {
            "testdir/b": "output-0432d5c3dc41495725df46eeeedb1386"
        },
        "mkdir testdir/c": {
            "testdir/c": "output-54a9057bcd619534a49f669dd5ed3078"
        },
        "mkdir testdir/c/d": {
            "testdir/c/d": "output-fdb1b8414eeab993acc5623371c43a71"
        },
        "mkdir testdir/c/e": {
            "testdir/c/e": "output-eadea986453292aaa62ccde2312c3413"
        },
        "mkdir testdir/c/f": {
            "testdir/c/f": "output-5d7c7f98e6d795bbb252f6866c8d7850"
        },
        "touch testdir/b/f1": {
            "testdir/b/f1": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/b/f2": {
            "testdir/b/f2": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/c/d/f1": {
            "testdir/c/d/f1": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/c/d/f2": {
            "testdir/c/d/f2": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/f1": {
            "testdir/f1": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/f2": {
            "testdir/f2": "output-d41d8cd98f00b204e9800998ecf8427e"
        }
    }

    # assertions
    with local.cwd(builddir):
        assert_json_equality('.deps', expected_json)
        assert os.path.isdir('testdir/c/g')
        assert os.path.isfile('testdir/c/d/f2')
        assert os.path.isdir('existingdir/a')
    sys.exit.assert_called_once_with(0)

    builder.main(command_line=['-D', 'clean'])  #parallel_ok=True, #jobs=4,

    # clean must remove everything the build created but leave the
    # pre-existing directory and its file alone.
    with local.cwd(builddir):
        assert not os.path.isdir('testdir')
        assert os.path.isdir('existingdir')
        assert os.path.isfile('existingdir/existingfile')
        assert not os.path.isdir('existingdir/a')
from plumbum.cmd import mkdir, cp, git, vagrant

# Absolute path to this script's own source directory -- the template
# files (.gitignore, Procfile, Vagrantfile, ...) live next to it.
script_path_inspect = inspect.getfile(inspect.currentframe())
# The absolute path to the source code directory.
source_path = os.path.dirname(os.path.abspath(script_path_inspect))

# The absolute path to the current working directory, where this script
# is being launched.
current_path = local.cwd

# Take in the first argument or option which is the project directory.
project_dir = sys.argv[1]

# Define absolute path to project directory.
project_path = current_path + "/" + project_dir

# BUG FIX: the original used Python 2 `print` statements, which are a
# syntax error under Python 3.  Single-argument print() calls behave
# identically on both interpreters.
with colors.orchid:
    print("========== Setting up your project directory ==========")
mkdir(project_dir)

with colors.orchid:
    print("========== Installing .gitignore ==========")
cp(source_path + "/.gitignore", project_path)

with colors.orchid:
    print("========== Installing Procfile and Runtime for Heroku ==========")
cp(source_path + "/Procfile", project_path)
cp(source_path + "/runtime.txt", project_path)

with colors.orchid:
    print("========== Installing Vagrant environment ==========")
cp(source_path + "/Vagrantfile", project_path)
# Install provisioning script.
cp(source_path + "/provision.sh", project_path)
def cmd_setup(self):
    """Ensure the artifacts directory exists (``mkdir -p`` is a no-op
    when it is already there)."""
    cmd.mkdir('-p', self.artifacts)
def setup_directories(builddir):
    """Create the container staging directories inside *builddir*.

    ``mkdir -p`` already succeeds silently when the directory exists,
    so the previous ``os.path.exists`` guards were redundant.
    """
    with local.cwd(builddir):
        mkdir("-p", "container-in")
        mkdir("-p", "container-out")
def test_mkdir(builddir, runner, end_fabricate):
    """mkdir/touch build graph: fabricate must delete children before
    their parent directories on clean and must keep directories that
    pre-existed the build."""
    # build.py content >>>>>>>>>>>>>>>>>>>>>
    def fabricate_file():
        def build():
            # Make lots of directories to check ordered delete
            run('mkdir', 'testdir', group='testdir')
            run('mkdir', 'testdir/a', group='a', after='testdir')
            run('mkdir', 'testdir/b', group='b', after='testdir')
            run('mkdir', 'testdir/c', group='c', after='testdir')
            run('mkdir', 'testdir/c/f', group='f', after='c')
            run('mkdir', 'testdir/c/e', group='e', after='c')
            run('mkdir', 'testdir/c/d', group='d', after='c')
            # put some files in them to ensure content deleted before dir
            run('touch', 'testdir/f1', after='testdir')
            run('touch', 'testdir/f2', after='testdir')
            run('touch', 'testdir/b/f1', after='b')
            run('touch', 'testdir/b/f2', after='b')
            run('touch', 'testdir/c/d/f1', after='d')
            run('touch', 'testdir/c/d/f2', after='d')
            # make a dir that already exists
            run('mkdir', '-p', 'testdir/c/d', after='d')
            # make a dir that already partially exists
            run('mkdir', '-p', 'testdir/c/g', after='c')
            # make a dir that already partially exists but should not be deleted
            run('mkdir', '-p', 'existingdir/a')

        def clean():
            autoclean()
        return copy(locals())

    # Pre-existing directory/file: these must survive the clean below.
    with local.cwd(builddir):
        sh.mkdir('existingdir')
        sh.touch('existingdir/existingfile')

    main(globals_dict=fabricate_file(),
         #parallel_ok=True,
         build_dir=builddir, runner=runner, command_line=['-D', 'build'])
    end_fabricate()

    # Expected dependency DB; "input-" hashes mark directories that
    # existed before a command ran, "output-" hashes mark build products.
    expected_json = {
        ".deps_version": 2,
        "mkdir -p existingdir/a": {
            "existingdir": "input-ae394c47b4ccf49007dc9ec847f657b9",
            "existingdir/a": "output-16873f5a4ba5199a8b51f812d159e37e"
        },
        "mkdir -p testdir/c/d": {
            "testdir": "input-3ca0a3620b59afb57cf5fd77cee6432c",
            "testdir/c": "input-54a9057bcd619534a49f669dd5ed3078",
            "testdir/c/d": "input-fdb1b8414eeab993acc5623371c43a71"
        },
        "mkdir -p testdir/c/g": {
            "testdir": "input-3ca0a3620b59afb57cf5fd77cee6432c",
            "testdir/c": "input-54a9057bcd619534a49f669dd5ed3078",
            "testdir/c/g": "output-c512be1476c9253326e479827c491f7f"
        },
        "mkdir testdir": {
            "testdir": "output-3ca0a3620b59afb57cf5fd77cee6432c"
        },
        "mkdir testdir/a": {
            "testdir/a": "output-832651e32363cb4b115b074240cd08b5"
        },
        "mkdir testdir/b": {
            "testdir/b": "output-0432d5c3dc41495725df46eeeedb1386"
        },
        "mkdir testdir/c": {
            "testdir/c": "output-54a9057bcd619534a49f669dd5ed3078"
        },
        "mkdir testdir/c/d": {
            "testdir/c/d": "output-fdb1b8414eeab993acc5623371c43a71"
        },
        "mkdir testdir/c/e": {
            "testdir/c/e": "output-eadea986453292aaa62ccde2312c3413"
        },
        "mkdir testdir/c/f": {
            "testdir/c/f": "output-5d7c7f98e6d795bbb252f6866c8d7850"
        },
        "touch testdir/b/f1": {
            "testdir/b/f1": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/b/f2": {
            "testdir/b/f2": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/c/d/f1": {
            "testdir/c/d/f1": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/c/d/f2": {
            "testdir/c/d/f2": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/f1": {
            "testdir/f1": "output-d41d8cd98f00b204e9800998ecf8427e"
        },
        "touch testdir/f2": {
            "testdir/f2": "output-d41d8cd98f00b204e9800998ecf8427e"
        }
    }

    # assertions
    with local.cwd(builddir):
        assert_json_equality('.deps', expected_json)
        assert os.path.isdir('testdir/c/g')
        assert os.path.isfile('testdir/c/d/f2')
        assert os.path.isdir('existingdir/a')
    sys.exit.assert_called_once_with(0)

    main(globals_dict=fabricate_file(),
         #parallel_ok=True,
         #jobs=4,
         build_dir=builddir, runner=runner, command_line=['-D', 'clean'])
    end_fabricate()

    # clean must remove everything the build created but leave the
    # pre-existing directory and its file alone.
    with local.cwd(builddir):
        assert not os.path.isdir('testdir')
        assert os.path.isdir('existingdir')
        assert os.path.isfile('existingdir/existingfile')
        assert not os.path.isdir('existingdir/a')