def install_sd(sd_version="0.7.6"):
    """Download a static `sd` release binary from GitHub and install it system-wide."""
    release_url = (
        f"https://github.com/chmln/sd/releases/download/v{sd_version}"
        f"/sd-v{sd_version}-x86_64-unknown-linux-musl"
    )
    with local.tempdir() as tmp, local.cwd(tmp):
        # Fetch the musl-linked release asset and save it locally as ./sd.
        cmd.wget(release_url, "-O", "sd")
        cmd.chmod("+x", "sd")
        # Installing into /usr/local/bin needs elevated privileges.
        cmd.sudo[cmd.mv["sd", "/usr/local/bin"]]()
def runVpr(vtrDir, vprVersion, timingRun):
    """Run the vpr wrapper script matching the requested VPR version.

    Args:
        vtrDir: Path to the VTR checkout; exported as $VTR_DIR so the
            wrapper script can locate the vpr installation.
        vprVersion: One of 6, 7 or 8; any other value exits with status 1.
        timingRun: For version 8 only, selects the timing-driven script.
    """
    # BUG FIX: this function used Python 2 `print` statements, which are a
    # SyntaxError in Python 3 (the rest of this file is Python 3).
    print('vtrdDir: ' + str(vtrDir))
    # Get the cwd. Note that we should be in the build dir.
    cwd = local.cwd
    # The vpr bash scripts use this environment variable to locate the vpr dir.
    local.env["VTR_DIR"] = str(vtrDir)
    # Choose the right tool path, depending on the vpr version.
    if vprVersion == 8:
        if timingRun:
            vprPath = cwd / "vpr8_timing.sh"
        else:
            vprPath = cwd / "vpr8.sh"
    elif vprVersion == 7:
        vprPath = cwd / "vpr7.sh"
    elif vprVersion == 6:
        vprPath = cwd / "vpr6.sh"
    else:
        print("ERROR: Unsupported vpr version: " + str(vprVersion))
        sys.exit(1)
    print('vpr script path:' + str(vprPath))
    # Because the vpr script was copied, set the executable flag.
    from plumbum.cmd import chmod
    chmod("a+x", str(vprPath))
    # Load the vpr command and run it, echoing its output.
    vpr = local[vprPath]
    print(vpr())
def run_tests(self, experiment):
    """Benchmark a postgres build with pgbench under the given experiment.

    Generates a shell wrapper embedding the experiment command, starts the
    server through it via `pg_ctl -p`, runs a select-only pgbench workload,
    and always stops the server again, even on failure.
    """
    from benchbuild.utils.run import run
    exp = experiment(self.run_f)
    # All postgres tools live in this project's build directory.
    pg_ctl = local[path.join(self.builddir, "pg_ctl")]
    dropdb = local[path.join(self.builddir, "dropdb")]
    createdb = local[path.join(self.builddir, "createdb")]
    pgbench = local[path.join(self.builddir, "pgbench")]
    bin_name = path.join(self.builddir, self.name + ".sh")
    test_data = path.join(self.testdir, "test-data")
    # Write a two-line shell wrapper that runs the experiment command;
    # postgres is launched through it below (pg_ctl's -p option).
    (echo["#!/bin/sh"] >> bin_name) & FG
    (echo[str(exp)] >> bin_name) & FG
    chmod("+x", bin_name)
    num_clients = 1
    num_transactions = 1000000
    # Stop any leftover server first; retcode=None ignores the failure
    # when no server is running.
    pg_ctl("stop", "-t", 360, "-w", "-D", test_data, retcode=None)
    try:
        with local.cwd(test_data):
            pg_ctl("start", "-p", bin_name, "-w", "-D", test_data)
            # Drop a stale pgbench DB if present (retcode=None: best effort).
            dropdb["pgbench"] & FG(retcode=None)
            createdb("pgbench")
            # -i initializes the pgbench tables; -S runs select-only transactions.
            run(pgbench["-i", "pgbench"])
            run(pgbench[
                "-c", num_clients, "-S", "-t", num_transactions, "pgbench"])
            dropdb("pgbench")
            pg_ctl("stop", "-t", 360, "-w", "-D", test_data)
    except Exception:
        # Always shut the server down before propagating the failure.
        pg_ctl("stop", "-t", 360, "-w", "-D", test_data)
        raise
def installTraining(repo, commit):
    """Download a pnlbwh training repository, generate its csv, and write-protect it.

    Args:
        repo: Repository name under the 'pnlbwh' GitHub organization.
        commit: Commit/ref of the archive to download.
    """
    archive = downloadGithubArchive('pnlbwh/' + repo, commit)
    archive.move(dest / repo)
    with local.cwd(dest / repo):
        from plumbum.cmd import bash
        bash('./mktrainingcsv.sh', '.')
        # BUG FIX: chmod('a-w', '*') passed a literal '*' to chmod — plumbum
        # runs commands without a shell, so no glob expansion happened and
        # chmod failed (or targeted a file literally named '*'). Expand the
        # glob explicitly so every entry becomes read-only.
        chmod('a-w', local.cwd // '*')
def mount_via_cron(device, volume_id):
    """Mount `device` at /volumes/<volume_id> on the host via a cron-run script.

    Assumes we run in a container with the host filesystem visible under
    /host_root. A one-shot script is dropped into /volumes/automount, which a
    run-parts crontab entry executes on the host; the script mounts the
    device, leaves a marker file, and removes itself. We then poll for the
    marker (up to ~2 minutes) and raise NotMountedException on timeout.
    """
    logger.info("Ensuring cron mount")
    # Host-side paths vs. the same paths as seen from this container (_inner).
    mount_script = "/volumes/automount/%s" % volume_id
    mount_script_inner = "/host_root%s" % mount_script
    mounted_mark = "/volumes/automount/.mounted-%s" % volume_id
    mounted_mark_inner = "/host_root%s" % mounted_mark
    if not local.path('/host_root/volumes/automount').exists():
        # First use on this host: create the automount dir and register the
        # every-minute run-parts crontab entry.
        mkdir('-p', "/host_root/volumes/automount")
        sh(
            '-c',
            "echo '* * * * * root cd / && run-parts --report /volumes/automount' >> /host_root/etc/crontab"
        )
    mkdir('-p', "/host_root/volumes/%s" % volume_id)
    # The one-shot mount script: mounts, drops the marker, deletes itself.
    local.path(mount_script_inner).write("""#!/usr/bin/env sh
set -e
mount {device} /volumes/{volume_id}
touch {mounted_mark}
rm {mount_script}
""".format(**locals()))
    # run-parts only executes files with the executable bit set.
    chmod('+x', "/host_root%s" % mount_script)

    @retry(wait_fixed=2000, stop_max_attempt_number=60)
    def wait_for_mount():
        # Poll every 2s for the marker left by the cron-run script.
        logger.info("Waiting for mount")
        if local.path(mounted_mark_inner).exists():
            local.path(mounted_mark_inner).delete()
            return True
        else:
            raise NotMountedException()

    wait_for_mount()
def mount_via_cron(device, volume_id):
    """Ensure `device` gets mounted at /volumes/<volume_id> by host cron.

    Duplicate variant of mount_via_cron above (same logic, different
    formatting). Writes a self-deleting mount script into the host's
    /volumes/automount directory (executed by a run-parts crontab entry),
    then polls for the marker file the script leaves behind.
    """
    logger.info("Ensuring cron mount")
    # Paths on the host; the _inner variants are the container's view via /host_root.
    mount_script = "/volumes/automount/%s" % volume_id
    mount_script_inner = "/host_root%s" % mount_script
    mounted_mark = "/volumes/automount/.mounted-%s" % volume_id
    mounted_mark_inner = "/host_root%s" % mounted_mark
    if not local.path('/host_root/volumes/automount').exists():
        # Bootstrap: create the automount dir and install the crontab line.
        mkdir('-p', "/host_root/volumes/automount")
        sh('-c', "echo '* * * * * root cd / && run-parts --report /volumes/automount' >> /host_root/etc/crontab")
    mkdir('-p', "/host_root/volumes/%s" % volume_id)
    # One-shot script: mount, leave marker, remove itself.
    local.path(mount_script_inner).write(
        """#!/usr/bin/env sh
set -e
mount {device} /volumes/{volume_id}
touch {mounted_mark}
rm {mount_script}
""".format(**locals())
    )
    # Executable bit required for run-parts to pick the script up.
    chmod('+x', "/host_root%s" % mount_script)

    @retry(wait_fixed=2000, stop_max_attempt_number=60)
    def wait_for_mount():
        # Retry every 2s, 60 attempts, then NotMountedException propagates.
        logger.info("Waiting for mount")
        if local.path(mounted_mark_inner).exists():
            local.path(mounted_mark_inner).delete()
            return True
        else:
            raise NotMountedException()

    wait_for_mount()
def main(self):
    """Generate a gomock Bazel rule for a Go package, build it, vendor the result.

    Writes a BUILD.bazel containing a `gomock` rule into <package>/mock_<name>,
    builds it with bazel, copies the generated mock.go into the workspace, and
    reruns gazelle so BUILD files pick up the new source.
    """
    # Normalize: a trailing slash would break the mock_<name> derivation below.
    self.package = self.package.rstrip("/")
    package_path = plumbum.local.path(self.package)
    name = package_path.name
    mock_path = plumbum.local.path(package_path / "mock_%s" % name)
    # Remove previously generated mock sources.
    delete(mock_path // "*.go")
    # NOTE: %s of a Python list of strings renders as ['a', 'b', ...], which is
    # also valid Starlark list syntax for the gomock `interfaces` attribute.
    buildscript = """
load("@com_github_jmhodges_bazel_gomock//:gomock.bzl", "gomock")

gomock(
    name = "go_default_mock",
    out = "mock.go",
    interfaces = %s,
    library = "//%s:go_default_library",
    package = "mock_%s",
)
""" % (self.interfaces.split(","), self.package, name)
    pathlib.Path(mock_path).mkdir(parents=True, exist_ok=True)
    pathlib.Path(mock_path / "BUILD.bazel").write_text(buildscript)
    mock_rule = "//%s:go_default_mock" % os.path.join(
        self.package, "mock_%s" % name)
    bazel = plumbum.local['bazel']
    bazel("build", mock_rule)
    # Copy the bazel-built mock into the workspace with standard permissions.
    bf, wf = rule_to_file(mock_rule)
    cmd.cp(bf, wf)
    cmd.chmod("0644", wf)
    # Regenerate BUILD files so the new mock source is referenced.
    cmd.make("gazelle")
def update_files(self):
    """Rebuild every registered mock rule and copy the outputs into the workspace."""
    mock_targets = mock_rules()
    bazel_cmd = plumbum.local['bazel']
    print("building mock files...")
    # One bazel invocation builds all targets at once.
    bazel_cmd("build", mock_targets)
    for target in mock_targets:
        print(target)
        # Map the bazel output file to its workspace destination.
        built_file, workspace_file = rule_to_file(target)
        cmd.cp(built_file, workspace_file)
        cmd.chmod("0644", workspace_file)
def main(self):
    """Clone tract_querier, trim it, and install a write-protected copy."""
    repo_url = 'https://github.com/demianw/tract_querier.git'
    with TemporaryDirectory() as tmpdir:
        checkout = local.path(tmpdir) / "tract_querier"
        # A pinned hash needs full history; otherwise a shallow clone suffices.
        if self.githash:
            git("clone", repo_url, checkout)
        else:
            git("clone", "--depth", "1", repo_url, checkout)
        # Slice off the trailing newline from git's output.
        short_sha = git("rev-parse", "--short", "HEAD")[:-1]
        # NOTE(review): rev-parse runs in the current working directory, not
        # inside the fresh clone — confirm the caller's cwd is the intended repo.
        # save 70M of space
        rm('-r', checkout / 'doc')
        rm('-r', checkout / '.git')
        out = self.prefix / "tract_querier-" + short_sha
        checkout.move(out)
        chmod('-R', 'a-w', out)
def main(self):
    """Fetch tract_querier at a pinned commit and publish a read-only copy."""
    with TemporaryDirectory() as tmpdir, local.cwd(tmpdir):
        repo = downloadGithubRepo('demianw/tract_querier', self.parent.commit)
        sha, date = getCommitInfo(repo)
        # Drop the bulky subtrees before publishing (save space).
        for bulky in ('doc', '.git'):
            (repo / bulky).delete()
        outdir = local.path(self.parent.dest / 'tract_querier-' + sha)
        # Idempotence guard: a previous run already installed this commit.
        if outdir.exists():
            logging.warning(outdir + ' already exists, quitting.')
            sys.exit(0)
        logging.info("Make '{outdir}'".format(**locals()))
        repo.move(outdir)
        # Write-protect the whole tree, then the directory itself.
        chmod('-R', 'a-w', outdir)
        chmod('a-w', outdir)
        # Also expose a date-stamped symlink next to the sha-stamped dir.
        date_symlink = self.parent.dest / 'tract_querier-' + date
        outdir.symlink(date_symlink)
def wrap_dynamic(name, runner, sprefix=None):
    """ Wrap the binary :name with the function :runner.

    This module generates a python tool :name: that can replace
    a yet unspecified binary.
    It behaves similar to the :wrap: function. However, the first
    argument is the actual binary name.

    Args:
        name: name of the python module
        runner: Function that should run the real binary

    Returns: plumbum command, ready to launch.
    """
    import dill

    name_absolute = path.abspath(name)
    blob_f = name_absolute + PROJECT_BLOB_F_EXT
    # Serialize the runner so the generated wrapper can reload it at run time.
    with open(blob_f, 'wb') as blob:
        blob.write(dill.dumps(runner))

    with open(name_absolute, 'w') as wrapper:
        # BUG FIXES in the generated script below:
        #  * os.stderr -> sys.stderr (`os` was never imported, NameError);
        #  * "Got: " + sys.argv concatenated str+list (TypeError) -> str(sys.argv);
        #  * "unknwon" typo in the PPROF_GROUP fallback -> "unknown";
        #  * PPROF_DB_USER/PASS were "******" although db_user/db_pass are
        #    passed to .format (apparent redaction) -> restored placeholders.
        lines = '''#!/usr/bin/env python3
#
from pprof.project import Project
from pprof.experiment import Experiment
from plumbum import cli, local
from os import path, getenv
import sys
import dill

if not len(sys.argv) >= 2:
    sys.stderr.write("Not enough arguments provided!\\n")
    sys.stderr.write("Got: " + str(sys.argv) + "\\n")
    sys.exit(1)

f = None
run_f = sys.argv[1]
args = sys.argv[2:]
project_name = path.basename(run_f)
if path.exists("{blobf}"):
    with local.env(PPROF_DB_HOST="{db_host}",
                   PPROF_DB_PORT="{db_port}",
                   PPROF_DB_NAME="{db_name}",
                   PPROF_DB_USER="{db_user}",
                   PPROF_DB_PASS="{db_pass}",
                   PPROF_PROJECT=project_name,
                   PPROF_LIKWID_DIR="{likwiddir}",
                   LD_LIBRARY_PATH="{ld_lib_path}",
                   PPROF_CMD=run_f):
        with open("{blobf}", "rb") as p:
            f = dill.load(p)

if f is not None:
    exp_name = getenv("PPROF_EXPERIMENT", "unknown")
    domain_name = getenv("PPROF_DOMAIN", "unknown")
    group_name = getenv("PPROF_GROUP", "unknown")
    e = Experiment(exp_name, [], group_name)
    p = Project(e, project_name, domain_name, group_name)
    if not sys.stdin.isatty():
        f(run_f, args, has_stdin=True, project_name=project_name)
    else:
        f(run_f, args, project_name=project_name)
else:
    sys.exit(1)
'''.format(db_host=config["db_host"],
           db_port=config["db_port"],
           db_name=config["db_name"],
           db_user=config["db_user"],
           db_pass=config["db_pass"],
           likwiddir=config["likwiddir"],
           ld_lib_path=config["ld_library_path"],
           blobf=strip_path_prefix(blob_f, sprefix))
        wrapper.write(lines)
    chmod("+x", name_absolute)
    return local[name_absolute]
def wrap(name, runner, sprefix=None):
    """ Wrap the binary :name: with the function :runner:.

    This module generates a python tool that replaces :name:
    The function in runner only accepts the replaced binaries
    name as argument. We use the dill package to perform the
    serialization, make sure :runner: can be serialized with it
    and you're fine.

    Args:
        name: Binary we want to wrap
        runner: Function that should run instead of :name:

    Returns:
        A plumbum command, ready to launch.
    """
    import dill

    name_absolute = path.abspath(name)
    real_f = name_absolute + PROJECT_BIN_F_EXT
    # Move the real binary aside; the generated wrapper takes its place.
    mv(name_absolute, real_f)
    blob_f = name_absolute + PROJECT_BLOB_F_EXT
    # Serialize the runner for the wrapper to reload at run time.
    with open(blob_f, 'wb') as blob:
        dill.dump(runner, blob, protocol=-1, recurse=True)
    with open(name_absolute, 'w') as wrapper:
        # NOTE(review): PPROF_CMD is run_f + " ".join(args) — there is no
        # separator between the binary path and the first argument; looks
        # like a missing " ". Confirm consumers before changing.
        # NOTE(review): the "******" values look like redacted {db_user} and
        # {db_pass} placeholders (both are passed to .format but unused) —
        # verify against the original source.
        lines = '''#!/usr/bin/env python3
#
from plumbum import cli, local
from os import path
import sys
import dill

run_f = "{runf}"
args = sys.argv[1:]
f = None
if path.exists("{blobf}"):
    with local.env(PPROF_DB_HOST="{db_host}",
                   PPROF_DB_PORT="{db_port}",
                   PPROF_DB_NAME="{db_name}",
                   PPROF_DB_USER="******",
                   PPROF_DB_PASS="******",
                   PPROF_LIKWID_DIR="{likwiddir}",
                   LD_LIBRARY_PATH="{ld_lib_path}",
                   PPROF_CMD=run_f + " ".join(args)):
        with open("{blobf}", "rb") as p:
            f = dill.load(p)

if f is not None:
    if not sys.stdin.isatty():
        f(run_f, args, has_stdin = True)
    else:
        f(run_f, args)
else:
    sys.exit(1)
'''.format(db_host=config["db_host"],
           db_port=config["db_port"],
           db_name=config["db_name"],
           db_user=config["db_user"],
           db_pass=config["db_pass"],
           likwiddir=config["likwiddir"],
           ld_lib_path=config["ld_library_path"],
           blobf=strip_path_prefix(blob_f, sprefix),
           runf=strip_path_prefix(real_f, sprefix))
        wrapper.write(lines)
    chmod("+x", name_absolute)
    return local[name_absolute]
def main(self):
    """Build BRAINSTools (superbuild) at the pinned commit and install the binaries.

    Clones BRAINSia/BRAINSTools, configures a heavily trimmed superbuild
    (only DWIConvert/ConvertBetweenFileFormats plus ANTs), builds it, then
    publishes a write-protected BRAINSTools-bin-<sha> directory with a
    date-stamped symlink next to it.
    """
    blddir = self.parent.dest / "BRAINSTools-build"
    with local.cwd(self.parent.dest):
        repo = downloadGithubRepo('BRAINSia/BRAINSTools', self.parent.commit)
    sha, date = getCommitInfo(repo)
    logging.info("Build code:")
    blddir.mkdir()
    with local.cwd(blddir):
        cmake(repo,
              "-DBRAINSTools_INSTALL_DEVELOPMENT=OFF",
              "-DBRAINSTools_MAX_TEST_LEVEL=0",
              "-DBRAINSTools_SUPERBUILD=ON",
              "-DBRAINSTools_USE_QT=OFF",
              "-DBRAINS_DEBUG_IMAGE_WRITE=OFF",
              "-DBUILD_STYLE_UTILS=OFF",
              "-DBUILD_TESTING=OFF",
              "-DCMAKE_BUILD_TYPE=Release",
              "-DCMAKE_COLOR_MAKEFILE=ON",
              "-DCMAKE_EXE_LINKER_FLAGS=' '",
              "-DCMAKE_EXE_LINKER_FLAGS_DEBUG=",
              "-DCMAKE_EXE_LINKER_FLAGS_MINSIZEREL=",
              "-DCMAKE_EXE_LINKER_FLAGS_RELEASE=",
              "-DCMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO=",
              "-DCMAKE_EXPORT_COMPILE_COMMANDS=OFF",
              "-DCMAKE_INSTALL_PREFIX:PATH=/usr/local",
              "-DCMAKE_MODULE_LINKER_FLAGS=' '",
              "-DCMAKE_MODULE_LINKER_FLAGS_DEBUG=",
              "-DCMAKE_MODULE_LINKER_FLAGS_MINSIZEREL=",
              "-DCMAKE_MODULE_LINKER_FLAGS_RELEASE=",
              "-DCMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO=",
              "-DCMAKE_PROJECT_NAME:STATIC=SuperBuild_BRAINSTools",
              "-DCMAKE_SHARED_LINKER_FLAGS=' '",
              "-DCMAKE_SHARED_LINKER_FLAGS_DEBUG=",
              "-DCMAKE_SHARED_LINKER_FLAGS_MINSIZEREL=",
              "-DCMAKE_SHARED_LINKER_FLAGS_RELEASE=",
              "-DCMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO=",
              "-DCMAKE_SKIP_INSTALL_RPATH=NO",
              "-DCMAKE_SKIP_RPATH=NO",
              "-DCMAKE_STATIC_LINKER_FLAGS=",
              "-DCMAKE_STATIC_LINKER_FLAGS_DEBUG=",
              "-DCMAKE_STATIC_LINKER_FLAGS_MINSIZEREL=",
              "-DCMAKE_STATIC_LINKER_FLAGS_RELEASE=",
              "-DCMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO=",
              "-DCMAKE_USE_RELATIVE_PATHS=OFF",
              "-DCMAKE_VERBOSE_MAKEFILE=FALSE",
              "-DCOVERAGE_EXTRA_FLAGS=-l",
              "-DCTEST_SUBMIT_RETRY_COUNT=3",
              "-DCTEST_SUBMIT_RETRY_DELAY=5",
              "-DDART_TESTING_TIMEOUT=1500",
              "-DEXTERNAL_PROJECT_BUILD_TYPE=Release",
              "-DFORCE_EXTERNAL_BUILDS=OFF",
              "-DITK_VERSION_MAJOR=4",
              "-DSuperBuild_BRAINSTools_BUILD_DICOM_SUPPORT=ON",
              "-DSuperBuild_BRAINSTools_USE_CTKAPPLAUNCHER=OFF",
              "-DSuperBuild_BRAINSTools_USE_GIT_PROTOCOL=ON",
              "-DUSE_ANTS=ON",
              "-DUSE_AutoWorkup=OFF",
              "-DUSE_BRAINSABC=OFF",
              "-DUSE_BRAINSConstellationDetector=OFF",
              "-DUSE_BRAINSContinuousClass=OFF",
              "-DUSE_BRAINSCreateLabelMapFromProbabilityMaps=OFF",
              "-DUSE_BRAINSCut=OFF",
              "-DUSE_BRAINSDWICleanup=OFF",
              "-DUSE_BRAINSDemonWarp=OFF",
              "-DUSE_BRAINSFit=OFF",
              "-DUSE_BRAINSInitializedControlPoints=OFF",
              "-DUSE_BRAINSLabelStats=OFF",
              "-DUSE_BRAINSLandmarkInitializer=OFF",
              "-DUSE_BRAINSMultiModeSegment=OFF",
              "-DUSE_BRAINSMultiSTAPLE=OFF",
              "-DUSE_BRAINSMush=OFF",
              "-DUSE_BRAINSPosteriorToContinuousClass=OFF",
              "-DUSE_BRAINSROIAuto=OFF",
              "-DUSE_BRAINSResample=OFF",
              "-DUSE_BRAINSSnapShotWriter=OFF",
              "-DUSE_BRAINSStripRotation=OFF",
              "-DUSE_BRAINSSurfaceTools=OFF",
              "-DUSE_BRAINSTalairach=OFF",
              "-DUSE_BRAINSTransformConvert=OFF",
              "-DUSE_ConvertBetweenFileFormats=ON",
              "-DUSE_DWIConvert=ON",
              "-DUSE_DebugImageViewer=OFF",
              "-DUSE_GTRACT=OFF",
              "-DUSE_ICCDEF=OFF",
              "-DUSE_ImageCalculator=OFF",
              "-DUSE_ReferenceAtlas=OFF",
              "-DUSE_SYSTEM_DCMTK=OFF",
              "-DUSE_SYSTEM_ITK=OFF",
              "-DUSE_SYSTEM_SlicerExecutionModel=OFF",
              "-DUSE_SYSTEM_VTK=OFF",
              "-DVTK_GIT_REPOSITORY=git://vtk.org/VTK.git")
        make['all'] & FG
    out = self.parent.dest / 'BRAINSTools-bin-' + sha
    symlink = self.parent.dest / 'BRAINSTools-bin-' + date
    (blddir / 'bin').move(out)
    # ANTs' registration helper script is needed alongside the binaries.
    (blddir / 'ANTs/Scripts/antsRegistrationSyN.sh').copy(out)
    chmod('a-w', out // '*')
    chmod('a-w', out)
    # BUG FIX: the original referenced `outdir`, which is undefined in this
    # function (NameError at run time); the installed directory is `out`.
    out.symlink(symlink)
def print_libtool_sucks_wrapper(filepath, cflags, ldflags, compiler, func):
    """ Substitute a compiler with a script that hides CFLAGS & LDFLAGS.

    This will generate a wrapper script at :filepath: that calls the real
    compiler with the hidden flags and optionally invokes the serialized
    :func: afterwards. Note: nothing is returned; the caller is expected to
    wrap :filepath: into a plumbum command itself.

    Args:
        filepath (str): Path to the wrapper script.
        cflags (list(str)): The CFLAGS we want to hide.
        ldflags (list(str)): The LDFLAGS we want to hide.
        compiler (plumbum.cmd): Real compiler command we should call in the
            script.
        func: Optional callable, serialized with dill and re-run by the
            wrapper after each compile.
    """
    from plumbum.cmd import chmod
    import dill
    from pprof.project import PROJECT_BLOB_F_EXT
    from os.path import abspath

    blob_f = abspath(filepath + PROJECT_BLOB_F_EXT)
    # Persist the callback so the generated wrapper can reload it at run time.
    if func is not None:
        with open(blob_f, 'wb') as blob:
            blob.write(dill.dumps(func))

    with open(filepath, 'w') as wrapper:
        # NOTE(review): the "******" values below look like redacted
        # {db_user}/{db_pass} placeholders — both are passed to .format but
        # never used in the template. Verify against the original source.
        lines = '''#!/usr/bin/env python3
#
from plumbum import ProcessExecutionError, local, FG
from plumbum.commands.modifiers import TEE
from pprof.utils.run import GuardedRunException
from os import path
import logging
import dill

from pprof.settings import config
config["db_host"] = "{db_host}"
config["db_port"] = "{db_port}"
config["db_name"] = "{db_name}"
config["db_user"] = "******"
config["db_pass"] = "******"

cc=local[\"{CC}\"]
cflags={CFLAGS}
ldflags={LDFLAGS}

from sys import argv
import os
import sys

log = logging.getLogger("clang")
log.addHandler(logging.StreamHandler(stream=sys.stderr))

def really_exec(cmd):
    from plumbum.cmd import timeout
    try:
        log.info("Trying - %s", str(cmd))
        return (timeout["2m", cmd.formulate()] & TEE)
    except (GuardedRunException, ProcessExecutionError) as e:
        log.error("Failed to execute - %s", str(cmd))
        raise e

def call_original_compiler(input_files, cc, cflags, ldflags, flags):
    final_command = None
    retcode=0
    try:
        if len(input_files) > 0:
            if "-c" in flags:
                final_command = cc["-Qunused-arguments", cflags, ldflags, flags]
            else:
                final_command = cc["-Qunused-arguments", cflags, flags, ldflags]
        else:
            final_command = cc["-Qunused-arguments", flags]
        retcode, stdout, stderr = really_exec(final_command)
    except (GuardedRunException, ProcessExecutionError) as e:
        log.warn("Fallback to original flags and retry.")
        final_command = cc[flags, ldflags]
        log.warn("New Command: %s", str(final_command))
        retcode, _, _ = really_exec(final_command)
    return (retcode, final_command)

input_files = [ x for x in argv[1:] if not '-' is x[0] ]
flags = argv[1:]
f = None

retcode, final_cc = call_original_compiler(input_files, cc, cflags,
                                           ldflags, flags)
with local.env(PPROF_DB_HOST="{db_host}",
               PPROF_DB_PORT="{db_port}",
               PPROF_DB_NAME="{db_name}",
               PPROF_DB_USER="******",
               PPROF_DB_PASS="******"):
    """ FIXME: This is just a quick workaround. """
    if "conftest.c" not in input_files:
        with local.env(PPROF_CMD=str(final_cc)):
            if path.exists("{blobf}"):
                with open("{blobf}", "rb") as p:
                    f = dill.load(p)
            if f is not None:
                if not sys.stdin.isatty():
                    f(final_cc, has_stdin = True)
                else:
                    f(final_cc)
    sys.exit(retcode)
'''.format(CC=str(compiler()),
           CFLAGS=cflags,
           LDFLAGS=ldflags,
           blobf=blob_f,
           db_host=config["db_host"],
           db_name=config["db_name"],
           db_user=config["db_user"],
           db_pass=config["db_pass"],
           db_port=config["db_port"])
        wrapper.write(lines)
    # run-parts/shell execution requires the executable bit.
    chmod("+x", filepath)
def dump_slurm_script(script_name, benchbuild, experiment, projects):
    """ Dump a bash script that can be given to SLURM.

    Writes an sbatch array-job script: one array task per project, each task
    running `benchbuild -P <project> -E <experiment>` with the environment
    (config vars, PATH, LD_LIBRARY_PATH) baked in. The script is then
    syntax-checked with `bash -n` and made executable.

    Args:
        script_name (str): name of the bash script to create.
        benchbuild (plumbum.cmd): the benchbuild command to invoke per task.
        experiment (str): experiment name passed via -E and used in JobName.
        projects (list(str)): project names; one SLURM array index each.
    """
    log_path = os.path.join(CFG['slurm']['logs'].value())
    slurm_path = __get_slurm_path()
    slurm_ld = __get_slurm_ld_library_path()
    max_running_jobs = CFG['slurm']['max_running'].value()
    with open(script_name, 'w') as slurm:
        # Fixed sbatch header; per-task stdout is redirected later, hence -o /dev/null.
        lines = """#!/bin/bash
#SBATCH -o /dev/null
#SBATCH -t \"{timelimit}\"
#SBATCH --ntasks 1
#SBATCH --cpus-per-task {cpus}
"""
        slurm.write(lines.format(log=str(log_path),
                                 timelimit=str(CFG['slurm']['timelimit']),
                                 cpus=str(CFG['slurm']['cpus_per_task'])))
        if not CFG['slurm']['multithread'].value():
            slurm.write("#SBATCH --hint=nomultithread\n")
        if CFG['slurm']['exclusive'].value():
            slurm.write("#SBATCH --exclusive\n")
        # One array index per project; optionally throttle concurrency with %N.
        slurm.write("#SBATCH --array=0-{0}".format(len(projects) - 1))
        slurm.write("%{0}\n".format(max_running_jobs)
                    if max_running_jobs > 0 else '\n')
        slurm.write("#SBATCH --nice={0}\n".format(CFG["slurm"]["nice"].value()))
        # Bash array mapping SLURM_ARRAY_TASK_ID -> project name.
        slurm.write("projects=(\n")
        for project in projects:
            slurm.write("'{0}'\n".format(str(project)))
        slurm.write(")\n")
        slurm.write("_project=\"${projects[$SLURM_ARRAY_TASK_ID]}\"\n")
        # Redirect each task's stdout/stderr into a per-project log file.
        slurm_log_path = os.path.join(
            os.path.dirname(CFG['slurm']['logs'].value()),
            '$_project')
        slurm.write("exec 1> {log}\n".format(log=slurm_log_path))
        slurm.write("exec 2>&1\n")
        slurm.write(__prepare_node_commands(experiment))
        slurm.write("\n")
        # Export the full benchbuild configuration into the job environment.
        cfg_vars = repr(CFG).split('\n')
        cfg_vars = "\nexport ".join(cfg_vars)
        slurm.write("export ")
        slurm.write(cfg_vars)
        slurm.write("\n")
        slurm.write("export PATH={p}\n".format(p=slurm_path))
        slurm.write("export LD_LIBRARY_PATH={p}\n".format(p=slurm_ld))
        slurm.write("\n")
        # Rename the running job so `squeue` shows experiment + project.
        slurm.write("scontrol update JobId=$SLURM_JOB_ID ")
        slurm.write("JobName=\"{0} $_project\"\n".format(experiment))
        slurm.write("\n")
        # Write the experiment command.
        slurm.write(__cleanup_node_commands(slurm_log_path))
        slurm.write(str(benchbuild["-P", "$_project", "-E", experiment]) + "\n")
    # Sanity-check the generated script's syntax before handing it to SLURM.
    bash("-n", script_name)
    chmod("+x", script_name)