def configure(self):
    """Configure GDAL, then rasdaman, using benchbuild's wrapped compilers."""
    from benchbuild.utils.compiler import lt_clang, lt_clang_cxx
    from benchbuild.utils.run import run
    from plumbum.cmd import autoreconf, make

    rasdaman_dir = path.join(self.builddir, self.src_dir)
    gdal_dir = path.join(self.builddir, self.gdal_dir, self.gdal_dir)

    # Create the compiler wrappers inside the build directory.
    with local.cwd(self.builddir):
        clang = lt_clang(self.cflags, self.ldflags, self.compiler_extension)
        clang_cxx = lt_clang_cxx(self.cflags, self.ldflags,
                                 self.compiler_extension)

    # GDAL is a build dependency of rasdaman; build it statically first.
    with local.cwd(gdal_dir):
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run(configure["--with-pic", "--enable-static", "--disable-debug",
                          "--with-gnu-ld", "--without-ld-shared",
                          "--without-libtool"])
        run(make["-j", CFG["jobs"]])

    with local.cwd(rasdaman_dir):
        autoreconf("-i")
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run(configure["--without-debug-symbols", "--enable-benchmark",
                          "--with-static-libs", "--disable-java", "--with-pic",
                          "--disable-debug", "--without-docs"])
def compile(self):
    """Pack a mozjs source tarball, then configure and build it."""
    self.download()
    js_dir = local.path(self.src_file) / "js" / "src"
    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)

    # Produce the mozjs source package with a pinned 0.0.0 version string.
    with local.cwd(js_dir):
        make_src_pkg = local["./make-source-package.sh"]
        with local.env(
                DIST=self.builddir,
                MOZJS_MAJOR_VERSION=0,
                MOZJS_MINOR_VERSION=0,
                MOZJS_PATCH_VERSION=0):
            make_src_pkg()

    mozjs_dir = local.path("mozjs-0.0.0")
    mozjs_src_dir = mozjs_dir / "js" / "src"
    tar("xfj", mozjs_dir + ".tar.bz2")

    # mozjs requires the (legacy) autoconf 2.13 to generate ./configure.
    with local.cwd(mozjs_src_dir):
        mkdir("obj")
        local["autoconf-2.13"]()
        with local.cwd("obj"):
            with local.env(CC=str(clang), CXX=str(clang_cxx)):
                run.run(local["../configure"]["--without-system-zlib"])

    with local.cwd(mozjs_src_dir / "obj"):
        run.run(make["-j", str(CFG["jobs"])])
def compile(self):
    """Build GDAL statically, then configure and build rasdaman against it."""
    self.download()
    download.Git(self.gdal_uri, self.gdal_dir)
    rasdaman_dir = local.path(self.src_file)
    gdal_dir = local.path(self.gdal_dir) / self.gdal_dir

    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)

    # GDAL is a build dependency of rasdaman.
    with local.cwd(gdal_dir):
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run.run(configure["--with-pic", "--enable-static",
                              "--disable-debug", "--with-gnu-ld",
                              "--without-ld-shared", "--without-libtool"])
        run.run(make["-j", CFG["jobs"]])

    with local.cwd(rasdaman_dir):
        autoreconf("-i")
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run.run(configure["--without-debug-symbols", "--enable-benchmark",
                              "--with-static-libs", "--disable-java",
                              "--with-pic", "--disable-debug",
                              "--without-docs"])
        run.run(make["clean", "all", "-j", CFG["jobs"]])
def run_project(self, p):
    """
    Run the experiment with papi support.

    Args:
        p: The project we run.
    """
    from pprof.settings import config
    from uuid import uuid4

    p = self.init_project(p)
    with local.env(PPROF_ENABLE=1):
        # Instrument the project and link against the pprof runtime.
        p.cflags = ["-mllvm", "-instrument"] + p.cflags
        p.ldflags = p.ldflags + ["-lpprof"]
        for i in range(1, int(config["jobs"]) + 1):
            with step("{} cores & uuid {}".format(i, p.run_uuid)):
                p.clean()
                p.prepare()
                p.download()
                # Disable runtime tracing while we (re)build the project.
                with local.env(PPROF_ENABLE=0):
                    p.compiler_extension = partial(collect_compilestats, p,
                                                   self, config)
                    p.configure()
                    p.build()
                p.run_uuid = uuid4()
                p.run(partial(run_with_papi, p, self, config, i))
def build(self):
    """Interactively provision the chroot image and repack its tarball."""
    import sys

    # Don't do something when running non-interactive.
    if not sys.stdout.isatty():
        return

    from plumbum import FG
    from benchbuild.utils.downloader import update_hash
    from logging import info

    root = CFG["tmp_dir"].value()
    src_file = self.src_file + ".new"
    with local.cwd(self.builddir):
        emerge_in_chroot = uchroot()["/usr/bin/emerge"]
        emerge_boost = uchroot(uid=501, gid=10)["/usr/bin/emerge"]
        with local.env(CC="gcc", CXX="g++", ACCEPT_KEYWORDS="~amd64"):
            with local.env(USE="-filecaps"):
                run(emerge_in_chroot["likwid"])
            with local.env(USE="static-libs"):
                run(emerge_in_chroot["dev-libs/libpfm"])
            run(emerge_in_chroot["dev-libs/papi"])
            run(emerge_in_chroot["sys-process/time"])
            run(emerge_boost["dev-utils/boost-build"])
            run(emerge_boost["dev-libs/boost"])

        # Repack the chroot into a fresh tarball next to the old one.
        tgt_path = path.join(root, self.src_file)
        tgt_path_new = path.join(root, src_file)
        tar("cjf", tgt_path_new, ".")
    update_hash(src_file, root)
    mv(path.join(root, src_file), tgt_path)
def compile(self):
    """Build the mhash and libmcrypt dependencies, then compile mcrypt."""
    self.download()
    download.Wget(self.libmcrypt_uri, self.libmcrypt_file)
    download.Wget(self.mhash_uri, self.mhash_file)

    tar('xfz', self.src_file)
    tar('xfz', self.libmcrypt_file)
    tar('xfz', self.mhash_file)

    builddir = local.path(self.builddir)
    mcrypt_dir = builddir / "mcrypt-2.6.8"
    mhash_dir = builddir / self.mhash_dir
    libmcrypt_dir = builddir / self.libmcrypt_dir

    _cc = compiler.cc(self)
    _cxx = compiler.cxx(self)

    # Build mhash dependency
    with local.cwd(mhash_dir):
        configure = local["./configure"]
        with local.env(CC=_cc, CXX=_cxx):
            run.run(configure["--prefix=" + builddir])
            run.run(make["-j", CFG["jobs"], "install"])

    # Builder libmcrypt dependency
    with local.cwd(libmcrypt_dir):
        configure = local["./configure"]
        with local.env(CC=_cc, CXX=_cxx):
            run.run(configure["--prefix=" + builddir])
            run.run(make["-j", CFG["jobs"], "install"])

    with local.cwd(mcrypt_dir):
        configure = local["./configure"]
        lib_dir = builddir / "lib"
        inc_dir = builddir / "include"
        # Point the build at the locally installed dependencies.
        env = CFG["env"].value
        mod_env = dict(
            CC=_cc,
            CXX=_cxx,
            LD_LIBRARY_PATH=path.list_to_path(
                [str(lib_dir)] + env.get("LD_LIBRARY_PATH", [])),
            LDFLAGS="-L" + str(lib_dir),
            CFLAGS="-I" + str(inc_dir))
        env.update(mod_env)
        with local.env(**env):
            run.run(configure["--disable-dependency-tracking",
                              "--enable-static", "--disable-shared",
                              "--with-libmcrypt=" + builddir,
                              "--with-libmhash=" + builddir])
            run.run(make["-j", CFG["jobs"]])
def test_django_project(tmpdir):
    """Generate a Django project, then run its migrations and test suite."""
    output_dir = str(tmpdir)
    project_name = 'test_project'
    generate_project(
        output_dir=output_dir,
        extra_context={'project_name': project_name})

    with local.cwd(os.path.join(output_dir, project_name)):
        with local.env(DATABASE_URL='sqlite://localhost/:memory:'):
            python['manage.py', 'migrate']()
        with local.env(DJANGO_SETTINGS_MODULE='{}.settings.test'.format(
                project_name)):
            python['manage.py', 'test']()
def test_custom_env(self):
    """update() and clear() on local.env should be visible immediately."""
    with local.env():
        extra = {'one': 'OnE', 'tww': 'TWOO'}
        local.env.update(extra)
        assert 'tww' in local.env
        local.env.clear()
        assert 'tww' not in local.env
def run(self):
    """
    Run the experiment on all registered projects.

    Setup the environment and call run_project method on all projects.
    """
    from datetime import datetime
    from logging import error, info

    experiment, session = persist_experiment(self)
    # Keep the earliest begin timestamp across repeated runs.
    if experiment.begin is None:
        experiment.begin = datetime.now()
    else:
        experiment.begin = min(experiment.begin, datetime.now())

    try:
        with local.env(PPROF_EXPERIMENT_ID=str(config["experiment"])):
            self.map_projects(self.run_this_project, "run")
    except KeyboardInterrupt:
        error("User requested termination.")
    except Exception as ex:
        error("{}".format(ex))
        print("Shutting down...")
    finally:
        # Keep the latest end timestamp and persist the experiment row.
        if experiment.end is None:
            experiment.end = datetime.now()
        else:
            experiment.end = max(experiment.end, datetime.now())
        session.add(experiment)
        session.commit()
def run_tests(self, runner):
    """Run the polybench binary; optionally diff against a no-opts baseline."""
    def filter_stderr(stderr_raw, stderr_filtered):
        """Extract dump_arrays_output from stderr."""
        with open(stderr_raw, 'r') as stderr:
            with open(stderr_filtered, 'w') as stderr_filt:
                stderr_filt.writelines(
                    get_dump_arrays_output(stderr.readlines()))

    polybench_opts = CFG["projects"]["polybench"]
    verify = bool(polybench_opts["verify"])

    binary = local.cwd / self.name
    opt_stderr_raw = binary + ".stderr"
    opt_stderr_filtered = opt_stderr_raw + ".filtered"
    runner(wrapping.wrap(binary, self))
    filter_stderr(opt_stderr_raw, opt_stderr_filtered)

    if verify:
        # Run the uninstrumented baseline binary and compare dumped arrays.
        binary = local.cwd / (self.name + ".no-opts")
        noopt_stderr_raw = binary + ".stderr"
        noopt_stderr_filtered = noopt_stderr_raw + ".filtered"
        with local.env(BB_IS_BASELINE=True):
            runner(wrapping.wrap(binary, self))
        filter_stderr(noopt_stderr_raw, noopt_stderr_filtered)

        diff_cmd = diff[noopt_stderr_filtered, opt_stderr_filtered]
        runner(diff_cmd, retcode=0)
def run_project(self, p):
    """
    Create & Run a papi-instrumented version of the project.

    This experiment does not use the -jitable flag of libPolyJIT.
    Therefore, we get the static (aka Standard) SCoP coverage.
    """
    from pprof.settings import config
    llvm_libs = path.join(config["llvmdir"], "lib")

    with step("Class: Standard - PAPI"):
        p.download()
        p.ldflags = ["-L" + llvm_libs, "-lpjit", "-lpprof", "-lpapi"]
        # Prepend -L entries for every non-empty ld_library_path component.
        ld_lib_path = [_f for _f in config["ld_library_path"].split(":")
                       if _f]
        p.ldflags = ["-L" + el for el in ld_lib_path] + p.ldflags
        p.cflags = ["-O3", "-Xclang", "-load", "-Xclang", "LLVMPolyJIT.so",
                    "-mllvm", "-polli", "-mllvm", "-instrument", "-mllvm",
                    "-no-recompilation", "-mllvm",
                    "-polly-detect-keep-going"]
        with substep("reconf & rebuild"):
            with local.env(PPROF_ENABLE=0):
                p.configure()
                p.build()
        with substep("run"):
            p.run(partial(run_with_time, p, self, config, 1))

    with step("Evaluation"):
        bin_path = path.join(config["llvmdir"], "bin")
        pprof_calibrate = local[path.join(bin_path, "pprof-calibrate")]
        papi_calibration = self.get_papi_calibration(p, pprof_calibrate)
        self.persist_calibration(p, pprof_calibrate, papi_calibration)
def collect_compilestats(project, experiment, config, clang, **kwargs):
    """Collect compilestats."""
    from benchbuild.utils.run import guarded_exec, handle_stdin
    from benchbuild.settings import CFG as c
    from benchbuild.utils.db import persist_compilestats
    from benchbuild.utils.schema import CompileStat

    c.update(config)
    clang = handle_stdin(clang["-mllvm", "-stats"], kwargs)

    with local.env(BB_ENABLE=0):
        with guarded_exec(clang, project, experiment) as run:
            ri = run()

    if ri.retcode == 0:
        # Parse LLVM's -stats output into CompileStat rows.
        stats = []
        for stat in get_compilestats(ri.stderr):
            compile_s = CompileStat()
            compile_s.name = stat["desc"].rstrip()
            compile_s.component = stat["component"].rstrip()
            compile_s.value = stat["value"]
            stats.append(compile_s)

        # Optional component/name filters from the configuration.
        components = c["cs"]["components"].value()
        if components is not None:
            stats = [s for s in stats if str(s.component) in components]
        names = c["cs"]["names"].value()
        if names is not None:
            stats = [s for s in stats if str(s.name) in names]

        persist_compilestats(ri.db_run, ri.session, stats)
def run_tests(self, runner):
    """Run leveldb's sqlite3 db_bench with the leveldb tree on LD_LIBRARY_PATH."""
    leveldb_dir = local.path("leveldb.src")
    with local.cwd(leveldb_dir):
        with local.env(LD_LIBRARY_PATH=leveldb_dir):
            bench = wrapping.wrap(
                leveldb_dir / 'out-static' / 'db_bench_sqlite3', self)
            run.run(bench)
def postgres(schema, tables, data_directory, psql_path, **params):
    """Load each table's CSV into PostgreSQL via psql's COPY FROM STDIN."""
    psql = local.get('psql', psql_path)
    data_directory = Path(data_directory)
    logger.info('Initializing PostgreSQL...')
    engine = init_database(
        'postgresql', params, schema, isolation_level='AUTOCOMMIT')

    query = "COPY {} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')"
    database = params['database']
    for table in tables:
        src = data_directory / '{}.csv'.format(table)
        load = psql[
            '--host', params['host'],
            '--port', params['port'],
            '--username', params['user'],
            '--dbname', database,
            '--command', query.format(table),
        ]
        # PGPASSWORD keeps psql from prompting interactively.
        with local.env(PGPASSWORD=params['password']):
            with src.open('r') as f:
                load(stdin=f)

    engine.execute('VACUUM FULL ANALYZE')
def run(self, experiment):
    """
    Run the tests of this project.

    This method initializes the default environment and takes care of
    cleaning up the mess we made, after a successful run.

    Args:
        experiment: The experiment we run this project under
    """
    from pprof.utils.run import GuardedRunException
    from pprof.utils.run import (begin_run_group, end_run_group,
                                 fail_run_group)

    with local.cwd(self.builddir):
        with local.env(PPROF_USE_DATABASE=1,
                       PPROF_DB_RUN_GROUP=self.run_uuid,
                       PPROF_DOMAIN=self.domain,
                       PPROF_GROUP=self.group_name,
                       PPROF_SRC_URI=self.src_uri):
            group, session = begin_run_group(self)
            try:
                self.run_tests(experiment)
                end_run_group(group, session)
            except GuardedRunException:
                fail_run_group(group, session)
            except KeyboardInterrupt as key_int:
                # Mark the group failed before propagating the interrupt.
                fail_run_group(group, session)
                raise key_int
        if not config["keep"]:
            self.clean()
def run_build(self, makefile=None):
    """Run the makefile build; ESP targets need ESP_ROOT exported first."""
    log.info('Running build')
    if self.architecture.lower() == 'esp':
        with local.env(ESP_ROOT=self.esp_root):
            return self.run_makefile(makefile=makefile)
    return self.run_makefile(makefile=makefile)
def compile(self):
    """Collect gentoo ebuilds matching the configured languages/USE flags."""
    with local.env(CC="gcc", CXX="g++"):
        emerge_in_chroot = uchroot.uchroot()["/usr/bin/emerge"]
        run.run(emerge_in_chroot["app-portage/portage-utils"])
        run.run(emerge_in_chroot["app-portage/gentoolkit"])

        qgrep_in_chroot = uchroot.uchroot()["/usr/bin/qgrep"]
        equery_in_chroot = uchroot.uchroot()["/usr/bin/equery"]

        ebuilds = set()
        languages = CFG["gentoo"]["autotest_lang"].value
        use_flags = CFG["gentoo"]["autotest_use"].value
        file_location = str(CFG["gentoo"]["autotest_loc"])

        # Collect package atoms whose ebuilds mention the language marker.
        for language in languages:
            output = qgrep_in_chroot("-l", get_string_for_language(language))
            for line in output.split('\n'):
                if "ebuild" in line:
                    parts = line.split('.ebuild')[0].split('/')
                    package_atom = '{0}/{1}'.format(parts[0], parts[1])
                    ebuilds.add(package_atom)

        # Keep only packages that also carry every requested USE flag.
        for use in use_flags:
            output = equery_in_chroot("-q", "hasuse", "-p", use)
            ebuilds_use = set()
            for line in output.split('\n'):
                # Strip the trailing version suffix from the atom.
                ebuilds_use.add(re.sub(r"(.*)-[0-9]+.*$", r"\1", line))
            ebuilds = ebuilds.intersection(ebuilds_use)

        with open(file_location, "w") as output_file:
            for ebuild in sorted(ebuilds):
                output_file.write(str(ebuild) + "\n")
            output_file.flush()
def __str__(self):
    """Resolve the package version via a dry-run emerge inside the container."""
    try:
        domain, _, name = self.name.partition("_")
        package = domain + '/' + name

        _container = self.container()
        _uchroot = uchroot.no_args()
        _uchroot = _uchroot["-E", "-A", "-C", "-w", "/", "-r"]
        _uchroot = _uchroot[_container.local]

        # Let emerge write its autounmask changes without keyword prompts.
        with local.env(CONFIG_PROTECT="-*"):
            fake_emerge = _uchroot["emerge", "--autounmask-only=y",
                                   "--autounmask-write=y", "--nodeps"]
            run.run(fake_emerge[package])

        emerge_in_chroot = _uchroot["emerge", "-p", "--nodeps", package]
        _, stdout, _ = emerge_in_chroot.run()
        # Parse the version out of the pretended emerge output line.
        for line in stdout.split('\n'):
            if package in line:
                _, _, package_name = line.partition("/")
                _, name, version = package_name.partition(name)
                version, _, _ = version.partition(" ")
                return version[1:]
    except ProcessExecutionError:
        logger = logging.getLogger(__name__)
        logger.info("This older package might not exist any more.")
        return ""
def configure(self):
    """Build boost, run povray's prebuild step, then configure povray."""
    from benchbuild.utils.run import run

    # First we have to prepare boost for lady povray...
    boost_dir = path.join(self.builddir, self.boost_src_dir)
    boost_prefix = path.join(self.builddir, "boost-install")
    with local.cwd(boost_dir):
        from plumbum.cmd import mkdir
        mkdir(boost_prefix)
        bootstrap = local["./bootstrap.sh"]
        run(bootstrap["--with-toolset=clang",
                      "--prefix=\"{0}\"".format(boost_prefix)])
        b2 = local["./b2"]
        run(b2["--ignore-site-config", "variant=release", "link=static",
               "threading=multi", "optimization=speed", "install"])

    povray_dir = path.join(self.builddir, self.src_dir)
    with local.cwd(path.join(povray_dir, "unix")):
        from plumbum.cmd import sh
        sh("prebuild.sh")

    with local.cwd(povray_dir):
        from benchbuild.utils.compiler import lt_clang, lt_clang_cxx
        with local.cwd(self.builddir):
            clang = lt_clang(self.cflags, self.ldflags,
                             self.compiler_extension)
            clang_cxx = lt_clang_cxx(self.cflags, self.ldflags,
                                     self.compiler_extension)
        configure = local["./configure"]
        with local.env(COMPILED_BY="BB <*****@*****.**>",
                       CC=str(clang),
                       CXX=str(clang_cxx)):
            run(configure["--with-boost=" + boost_prefix])
def run_tests(self, experiment):
    """
    Render every bundled .pov scene with the wrapped povray binary.

    For each scene we extract the recommended render options from the
    scene file's leading comment block and pass them on to povray.

    Args:
        experiment: The experiment used to wrap the povray binary.
    """
    from plumbum.cmd import mkdir
    from benchbuild.project import wrap
    from benchbuild.utils.run import run

    povray_dir = path.join(self.builddir, self.src_dir)
    povray_binary = path.join(povray_dir, "unix", self.name)
    tmpdir = path.join(self.builddir, "tmp")
    povini = path.join(self.builddir, "cfg", ".povray", "3.6", "povray.ini")
    scene_dir = path.join(self.builddir, "share", "povray-3.6", "scenes")
    mkdir(tmpdir, retcode=None)

    povray = wrap(povray_binary, experiment)
    pov_files = find(scene_dir, "-name", "*.pov").splitlines()
    for pov_f in pov_files:
        from plumbum.cmd import head, grep, sed
        with local.env(POVRAY=povray_binary,
                       INSTALL_DIR=self.builddir,
                       OUTPUT_DIR=tmpdir,
                       POVINI=povini):
            # BUGFIX: plumbum passes arguments verbatim (no shell), so the
            # previous extra quoting ("\"" + pov_f + "\"" and "'pattern'")
            # made head look for a nonexistent file and grep never match.
            # Also capture the pipeline's output instead of `& FG`, which
            # returns None.
            extract_opts = (head["-n", "50", pov_f] |
                            grep["-E", "^//[ ]+[-+]{1}[^ -]"] |
                            head["-n", "1"] |
                            sed["s?^//[ ]*??"])
            # retcode=None: a scene without an option comment is not an error.
            _, options, _ = extract_opts.run(retcode=None)
            run(povray["+L" + scene_dir, "+L" + tmpdir, "-i" + pov_f,
                       "-o" + tmpdir, options.split(), "-p"],
                retcode=None)
def test_home(self):
    """local.env.home mirrors $HOME and is restored when the env exits."""
    assert local.env.home == local.env['HOME']
    old_home = local.env.home
    with local.env():
        local.env.home = 'Nobody'
        assert local.env.home == local.env['HOME']
        assert local.env.home == 'Nobody'
    assert local.env.home == old_home
def test_bound_env(self):
    """with_env overlays the context env without clobbering outer values."""
    from plumbum.cmd import printenv
    with local.env(FOO="hello"):
        assert printenv.with_env(BAR="world")("FOO") == "hello\n"
        assert printenv.with_env(BAR="world")("BAR") == "world\n"
        assert printenv.with_env(FOO="sea", BAR="world")("FOO") == "sea\n"
        assert printenv("FOO") == "hello\n"
def get_tool_command(self):
    """Return a runnable plumbum command for this tool."""
    if self.__executable.endswith('.py'):
        # Python tools are launched through the interpreter.
        return local['python'][self.get_executable_path()]
    # Resolve the bare executable against the tool's own directory.
    with local.env(PATH=self.get_directory()):
        return local[self.__executable]
def __call__(self):
    """Run the bound action with the experiment id exported, if configured."""
    if not self._obj or not self._action_fn:
        return
    with local.env(BB_EXPERIMENT_ID=str(CFG["experiment_id"])):
        self._action_fn()
def test_bound_env(self):
    """with_env values layer over the enclosing local.env context."""
    try:
        from plumbum.cmd import printenv
    except CommandNotFound:
        self.skipTest("printenv is missing")
    with local.env(FOO="hello"):
        self.assertEqual(
            printenv.with_env(BAR="world")("FOO", "BAR"), "hello\nworld\n")
        self.assertEqual(
            printenv.with_env(FOO="sea", BAR="world")("FOO", "BAR"),
            "sea\nworld\n")
def build(self):
    """Emerge the project's package inside the chroot."""
    with local.cwd(self.builddir):
        emerge_in_chroot = uchroot()["/usr/bin/emerge"]
        # DOMAIN/NAME is encoded as "<domain>_<name>"; rebuild the atom.
        prog = self.DOMAIN + "/" + str(self.NAME)[len(self.DOMAIN) + 1:]
        with local.env(CONFIG_PROTECT="-*"):
            # First pass only writes autounmask changes; failure is OK.
            emerge_in_chroot(
                "--autounmask-only=y", "--autounmask-write=y", prog,
                retcode=None)
        run(emerge_in_chroot[prog])
def test_change_env(self):
    """local.env stays in sync with the environment seen by children."""
    with local.env(silly=12):
        assert 12 == local.env['silly']
        actual = set(
            x.split('=')[0] for x in printenv().splitlines() if '=' in x)
        localenv = set(x[0] for x in local.env)
        print(actual, localenv)
        assert localenv == actual
        assert len(local.env) == len(actual)
def build_client_certs(self, client, settings):
    """
    Create client certificates via easy-rsa's pkitool.

    Skips generation when certificate files for the client already exist
    in KEY_DIR.

    Args:
        client: Client name; used as certificate basename.
        settings: Environment (EASY_RSA, KEY_DIR, ...) exported for pkitool.
    """
    with local.env(**settings):
        pkitool = local[os.path.join(local.env['EASY_RSA'], 'pkitool')]
        client_files = glob(os.path.join(local.env['KEY_DIR'], client + '.*'))
        if client_files:
            # BUGFIX: was a Python-2-only `print` statement; the call form
            # works on both Python 2 and 3.
            print("Client {} certs already exist, skipping".format(client))
        else:
            pkitool.run(client, retcode=0, stderr=sys.stdout)
def compile(self):
    """Clean and rebuild the project with the wrapped compilers."""
    self.download()
    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)
    with local.cwd(self.src_file):
        with local.env(CXX=str(clang_cxx), CC=str(clang)):
            make("clean")
            # -i: keep going past individual target failures.
            run.run(make["all", "-i"])
def prepare_project(self, project):
    """
    Invoke the prepare phase of the given project.

    Args:
        project (pprof.Project): The project we want to prepare.
    """
    # Tracing stays off during preparation.
    with local.env(PPROF_ENABLE=0):
        project.prepare()
def run_idiomize(args, mode='inplace'):
    """Run the idiomize tool over the rust tree with the toolchain libs."""
    full_args = ['-r', mode, '--cargo'] + args
    ld_lib_path = get_rust_toolchain_libpath()
    # don't overwrite existing ld lib path if any...
    if 'LD_LIBRARY_PATH' in local.env:
        ld_lib_path = ld_lib_path + ':' + local.env['LD_LIBRARY_PATH']
    with local.env(RUST_BACKTRACE='1', LD_LIBRARY_PATH=ld_lib_path):
        with local.cwd(os.path.join(RFK_DIR, 'rust')):
            idiomize[full_args]()
def test_item(self):
    """Item access, deletion and pop on local.env; context exit restores."""
    with local.env():
        local.env["simple_plum"] = "thing"
        assert "simple_plum" in local.env
        del local.env["simple_plum"]
        assert "simple_plum" not in local.env
        local.env["simple_plum"] = "thing"
        assert "simple_plum" in local.env
        assert "thing" == local.env.pop("simple_plum")
        assert "simple_plum" not in local.env
        local.env["simple_plum"] = "thing"
    # Leaving the context restores the original environment.
    assert "simple_plum" not in local.env
def main(self, threads=None):
    """Run all profiled programs and print a pass/fail + timing report."""
    # Only export OMP_NUM_THREADS when a thread count was requested.
    env = local.env(OMP_NUM_THREADS=threads) if threads else local.env()
    with env:
        results = make_results(self.profile)
        failed = [result for result in results if result['code'] != 0]
        successes = [result for result in results if result['code'] == 0]

        if failed:
            colors.fatal.print("Failed:")
            for result in failed:
                colors.fatal.print(result['name'], result['code'])
        else:
            colors.success.print("All programs completed.")

        print()
        colors.info.print(
            '{0:20}:\tTotal time (s)\tFit times'.format("Program"))
        for result in successes:
            fit = ', '.join(MIN_TIME.findall(result['stdout']))
            print((colors.success if result['code'] == 0 else colors.warn) |
                  '{0[name]:20}:\t{0[time]}\t{1}'.format(result, fit))
        if threads:
            colors.info.print("OMP Threads:", threads)
def run_tests(self, experiment, run):
    """Feed every bundled .cnf.gz test file to the wrapped minisat binary."""
    exp = wrap(
        path.join(self.SRC_FILE, "build", "dynamic", "bin", "minisat"),
        experiment)
    testfiles = glob(path.join(self.testdir, "*.cnf.gz"))
    minisat_lib_path = path.join(self.SRC_FILE, "build", "dynamic", "lib")
    for test_f in testfiles:
        # minisat's shared libs must be found at runtime.
        with local.env(LD_LIBRARY_PATH=minisat_lib_path + ":" +
                       getenv("LD_LIBRARY_PATH", "")):
            run((exp < test_f), None)
def test_item(self):
    """Dict-style protocol on local.env; the context restores on exit."""
    with local.env():
        local.env['simple_plum'] = 'thing'
        assert 'simple_plum' in local.env
        del local.env['simple_plum']
        assert 'simple_plum' not in local.env
        local.env['simple_plum'] = 'thing'
        assert 'simple_plum' in local.env
        assert 'thing' == local.env.pop('simple_plum')
        assert 'simple_plum' not in local.env
        local.env['simple_plum'] = 'thing'
    # The assignment above is rolled back when the context exits.
    assert 'simple_plum' not in local.env
def compile(self) -> None:
    """Compile the project."""
    glib_source = local.path(self.source_of_primary)
    cc_compiler = bb.compiler.cc(self)
    with local.cwd(glib_source):
        # meson only needs CC during project configuration.
        with local.env(CC=str(cc_compiler)):
            bb.watch(meson)("build")
        bb.watch(ninja)("-j", get_number_of_jobs(bb_cfg()), "-C", "build")
        verify_binaries(self)
def build(self, client, settings):
    """Build certs, ovpn file and tarball for a client after sanity checks."""
    with local.env(**settings):
        serial_file = os.path.join(local.env['KEY_DIR'], 'serial')
        index_file = os.path.join(local.env['KEY_DIR'], 'index.txt')
        # Do not continue without the serial file or index.txt
        if (not local.path(serial_file).exists()) or \
                (not local.path(index_file).exists()):
            sys.exit(
                "Index.txt or serial does not exist in KEY_DIR, aborting")
        self.build_client_certs(client, settings)
        self.build_client_ovpn_file(client, settings)
        self.tarball_client_files(client, settings)
def tarball_client_files(self, client, settings):
    """Pack the client's certs, key, ovpn file and CA cert into a tarball."""
    # Always regen the tarball; GZIP=-n keeps the archive reproducible.
    with local.env(GZIP='-n', **settings):
        tar.run(
            [
                '-czvf',
                os.path.join(local.env['KEY_DIR'], client + '.tar.gz'),
                '-C', local.env['KEY_DIR'],
                client + '.crt',
                client + '.key',
                client + '.ovpn',
                'ca.crt',
            ],
            retcode=0,
            stderr=sys.stdout)
def fetch():
    """Clone and build ukb, prepare its WordNet graph and dictionary."""
    os.makedirs("systems", exist_ok=True)
    with local.cwd("systems"):
        git("clone", "https://github.com/asoroa/ukb.git")
        with local.cwd("ukb/src"):
            local["./configure"]()
            make()
    # Prepare
    with local.env(UKB_PATH=abspath("systems/ukb/src")):
        with local.cwd("support/ukb"):
            bash("./prepare_wn30graph.sh")
    (python[__file__, "mkwndict", "--en-synset-ids"] >
     "support/ukb/wndict.fi.txt")()
def compile(self) -> None:
    """Compile the project."""
    libxml2_version_source = local.path(
        self.source_of(self.primary_source))
    c_compiler = bb.compiler.cc(self)
    with local.cwd(libxml2_version_source):
        # CC is only needed while cmake configures the build.
        with local.env(CC=str(c_compiler)):
            bb.watch(cmake)("-G", "Unix Makefiles", ".")
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def compile(self) -> None:
    """Compile the project."""
    bitlbee_source = local.path(self.source_of(self.primary_source))
    compiler = bb.compiler.cc(self)
    with local.cwd(bitlbee_source):
        # configure picks up CC; make then uses the generated settings.
        with local.env(CC=str(compiler)):
            bb.watch(local["./configure"])()
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def compile(self) -> None:
    """Compile the project."""
    tmux_source = local.path(self.source_of_primary)
    clang = bb.compiler.cc(self)
    with local.cwd(tmux_source):
        with local.env(CC=str(clang)):
            bb.watch(local["./autogen.sh"])()
            bb.watch(local["./configure"])()
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def run_with_time(project, experiment, config, jobs, run_f, args, **kwargs):
    """
    Run the given binary wrapped with time.

    Args:
        project: The benchbuild.project.
        experiment: The benchbuild.experiment.
        config: The benchbuild.settings.config.
        jobs: Number of cores we should use for this execution.
        run_f: The file we want to execute.
        args: List of arguments that should be passed to the wrapped binary.
        **kwargs: Dictionary with our keyword args. We support the following
            entries:
            project_name: The real name of our project. This might not
                be the same as the configured project name, if we got
                wrapped with ::benchbuild.project.wrap_dynamic
            has_stdin: Signals whether we should take care of stdin.
    """
    from benchbuild.utils.run import track_execution, fetch_time_output
    from benchbuild.settings import CFG
    from benchbuild.utils.db import persist_time, persist_config

    CFG.update(config)
    project.name = kwargs.get("project_name", project.name)
    timing_tag = "BB-JIT: "
    may_wrap = kwargs.get("may_wrap", True)

    run_cmd = local[run_f]
    run_cmd = run_cmd[args]
    if may_wrap:
        # Prefix with /usr/bin/time so user/sys/wall times land on stderr.
        run_cmd = time["-f", timing_tag + "%U-%S-%e", run_cmd]

    with local.env(OMP_NUM_THREADS=str(jobs),
                   POLLI_LOG_FILE=CFG["slurm"]["extra_log"].value()):
        with track_execution(run_cmd, project, experiment) as run:
            ri = run()

    if may_wrap:
        timings = fetch_time_output(
            timing_tag, timing_tag + "{:g}-{:g}-{:g}",
            ri.stderr.split("\n"))
        if timings:
            persist_time(ri.db_run, ri.session, timings)
            persist_config(ri.db_run, ri.session, {
                "cores": str(jobs - 1),
                "cores-config": str(jobs),
                "recompilation": "enabled"
            })
    return ri
def compile(self) -> None:
    """Compile the project."""
    git_source = local.path(self.source_of_primary)
    clang = bb.compiler.cc(self)
    with local.cwd(git_source):
        with local.env(CC=str(clang)):
            # Force regeneration of the configure script.
            delete("configure", "config.status")
            bb.watch(make)("configure")
            bb.watch(local["./configure"])()
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def run_tests(self, experiment, run):
    """
    Execute LevelDB's runtime configuration.

    Args:
        experiment: The experiment used to wrap the benchmark binary.
        run: Runner used to execute the wrapped binary.
    """
    exp = wrap(
        path.join(self.SRC_FILE, "out-shared", "db_bench"), experiment)
    # db_bench links against the shared leveldb library in out-shared.
    with local.env(LD_LIBRARY_PATH="{}:{}".format(
            path.join(self.SRC_FILE, "out-shared"),
            getenv("LD_LIBRARY_PATH", ""))):
        run(exp)
def reload_plumbum_env() -> Dict[str, Any]:
    """
    Reloads `local.env` after re-sourcing .zshrc.

    Spawns a zsh that re-sources the user config, dumps the resulting
    os.environ as JSON into a temp file, and merges it into `local.env`.

    Returns:
        The freshly captured environment mapping.
    """
    import os

    fd, temp_path = tempfile.mkstemp()
    try:
        with local.env(ZINIT_WAIT=""):
            (cmd.zsh["-s"] <<
             f"{sys.executable} -c {shlex.quote(EXPORT_OS_ENVIRON_SOURCE)} {temp_path}"
             )()
        # open(fd) adopts and closes the mkstemp file descriptor.
        with open(fd) as f:
            env = json.load(f)
    finally:
        # BUGFIX: the mkstemp file was previously never deleted.
        os.remove(temp_path)
    local.env.update(**env)
    return env
def build_leveldb(self):
    """Rebuild leveldb's sqlite3 db_bench against the unpacked sqlite."""
    sqlite_dir = local.path('sqlite-amalgamation-{0}'.format(self.version))
    leveldb_dir = "leveldb.src"
    # We need to place sqlite3 in front of all other flags.
    self.ldflags += ["-L{0}".format(sqlite_dir)]
    self.cflags += ["-I{0}".format(sqlite_dir)]
    clang_cxx = compiler.cxx(self)
    clang = compiler.cc(self)
    with local.cwd(leveldb_dir):
        with local.env(CXX=str(clang_cxx), CC=str(clang)):
            run.run(make["clean", "out-static/db_bench_sqlite3"])
def compile(self) -> None:
    """Bootstrap, configure and build coreutils."""
    coreutils_source = local.path(self.source_of_primary)
    compiler = bb.compiler.cc(self)
    with local.cwd(coreutils_source):
        # gnulib is vendored as a git submodule.
        git("submodule", "init")
        git("submodule", "update")
        with local.env(CC=str(compiler)):
            bb.watch(local["./bootstrap"])()
            bb.watch(local["./configure"])("--disable-gcc-warnings")
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
        verify_binaries(self)
def __compile_cmake(self) -> None:
    """Configure and build libssh out-of-tree with cmake."""
    libssh_source = local.path(self.source_of(self.primary_source))
    compiler = bb.compiler.cc(self)
    mkdir("-p", libssh_source / "build")
    with local.cwd(libssh_source / "build"):
        with local.env(CC=str(compiler)):
            bb.watch(cmake)("-G", "Unix Makefiles", "..")
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
    with local.cwd(libssh_source):
        verify_binaries(self)
def compile(self):
    """Check out and build sdcc without its PIC ports."""
    download.Svn(self.src_uri, self.SRC_FILE)
    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)
    with local.cwd(self.SRC_FILE):
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run.run(configure["--without-ccache", "--disable-pic14-port",
                              "--disable-pic16-port"])
        run.run(make["-j", CFG["jobs"]])
def compile(self):
    """Unpack and build ruby with statically linked extensions."""
    self.download()
    tar("xfz", self.src_file)
    unpack_dir = local.path('ruby-{0}'.format(self.version))
    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)
    with local.cwd(unpack_dir):
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            configure = local["./configure"]
            run.run(configure["--with-static-linked-ext",
                              "--disable-shared"])
        run.run(make["-j", CFG["jobs"]])
def build_leveldb(self):
    """Rebuild leveldb's sqlite3 db_bench against the local sqlite tree."""
    sqlite_dir = self.src_dir
    leveldb_dir = "leveldb.src"
    # We need to place sqlite3 in front of all other flags.
    self.ldflags += ["-L{0}".format(path.abspath(sqlite_dir))]
    self.cflags += ["-I{0}".format(path.abspath(sqlite_dir))]
    clang_cxx = cxx(self)
    clang = cc(self)
    with local.cwd(leveldb_dir):
        with local.env(CXX=str(clang_cxx), CC=str(clang)):
            run(make["clean", "out-static/db_bench_sqlite3"])
def run_tests(self, runner):
    """
    Execute LevelDB's runtime configuration.

    Args:
        runner: Runner used to execute the wrapped db_bench binary.
    """
    leveldb = wrapping.wrap(
        local.path(self.src_file) / "out-static" / "db_bench", self)
    # Expose the shared leveldb build to the dynamic loader.
    with local.env(LD_LIBRARY_PATH="{}:{}".format(
            local.path(self.src_file) / "out-shared",
            getenv("LD_LIBRARY_PATH", ""))):
        runner(leveldb)
def configure(self):
    """Configure and install mhash + libmcrypt, then configure mcrypt."""
    mcrypt_dir = self.src_dir
    mhash_dir = self.mhash_dir
    libmcrypt_dir = self.libmcrypt_dir

    # Build mhash dependency
    with local.cwd(mhash_dir):
        configure = local["./configure"]
        with local.env(CC=lt_clang(self.cflags, self.ldflags,
                                   self.compiler_extension),
                       CXX=lt_clang_cxx(self.cflags, self.ldflags,
                                        self.compiler_extension)):
            run(configure["--prefix=" + self.builddir])
            run(make["-j", CFG["jobs"], "install"])

    # Builder libmcrypt dependency
    with local.cwd(libmcrypt_dir):
        configure = local["./configure"]
        with local.env(CC=lt_clang(self.cflags, self.ldflags,
                                   self.compiler_extension),
                       CXX=lt_clang_cxx(self.cflags, self.ldflags,
                                        self.compiler_extension)):
            run(configure["--prefix=" + self.builddir])
            run(make["-j", CFG["jobs"], "install"])

    with local.cwd(mcrypt_dir):
        configure = local["./configure"]
        # Point mcrypt at the dependencies installed into the builddir.
        with local.env(CC=lt_clang(self.cflags, self.ldflags,
                                   self.compiler_extension),
                       CXX=lt_clang_cxx(self.cflags, self.ldflags,
                                        self.compiler_extension),
                       LD_LIBRARY_PATH=path.join(self.builddir, "lib") +
                       ":" + CFG["ld_library_path"],
                       LDFLAGS="-L" + path.join(self.builddir, "lib"),
                       CFLAGS="-I" + path.join(self.builddir, "include")):
            run(configure["--disable-dependency-tracking",
                          "--enable-static", "--disable-shared",
                          "--with-libmcrypt=" + self.builddir,
                          "--with-libmhash=" + self.builddir])
def main(self):
    """Register FreeSurfer's wmparc into DWI space and export the results."""
    fshome = local.path(os.getenv('FREESURFER_HOME'))
    if not fshome:
        logging.error('Set FREESURFER_HOME first.')
        sys.exit(1)

    with TemporaryDirectory() as tmpdir:
        tmpdir = local.path(tmpdir)
        b0masked = tmpdir / "b0masked.nrrd"
        b0masked1mm = tmpdir / "b0masked1mm.nrrd"
        brain = tmpdir / "brain.nii.gz"
        wmparc = tmpdir / "wmparc.nii.gz"
        brainmgz = self.parent.fsdir / 'mri/brain.mgz'
        wmparcmgz = self.parent.fsdir / 'mri/wmparc.mgz'
        wmparcindwi1mm = tmpdir / 'wmparcInDwi1mm.nii.gz'

        logging.info(
            "Make brain.nii.gz and wmparc.nii.gz from their mgz versions")
        vol2vol = local[fshome / 'bin/mri_vol2vol']
        label2vol = local[fshome / 'bin/mri_label2vol']
        # Empty SUBJECTS_DIR keeps FreeSurfer from resolving other subjects.
        with local.env(SUBJECTS_DIR=''):
            vol2vol('--mov', brainmgz, '--targ', brainmgz, '--regheader',
                    '--o', brain)
            label2vol('--seg', wmparcmgz, '--temp', brainmgz,
                      '--regheader', wmparcmgz, '--o', wmparc)

        logging.info('Extract B0 from DWI and mask')
        bse_py('-i', self.parent.dwi, '-m', self.parent.dwimask, '-o',
               b0masked)
        logging.info('Made masked B0')

        logging.info('Upsample masked baseline to 1x1x1')
        ResampleImageBySpacing('3', b0masked, b0masked1mm, '1', '1', '1')
        logging.info('Made 1x1x1 baseline')

        logging.info('Register wmparc to B0')
        pre = tmpdir / 'fsbrain_to_b0'
        affine = pre + '0GenericAffine.mat'
        warp = pre + '1Warp.nii.gz'
        antsRegistrationSyNMI_sh['-m', brain, '-f', b0masked1mm, '-o', pre,
                                 '-n', 32] & FG
        antsApplyTransforms('-d', '3', '-i', wmparc, '-t', warp, affine,
                            '-r', b0masked1mm, '-o', wmparcindwi1mm,
                            '--interpolation', 'NearestNeighbor')
        logging.info('Made ' + wmparcindwi1mm)

        logging.info('Make output directory')
        self.parent.out.mkdir()
        # Copy results out of the temporary directory before it is removed.
        b0masked.copy(self.parent.out)
        b0masked1mm.copy(self.parent.out)
        wmparcindwi1mm.copy(self.parent.out)
def test(model, inf, keyin, keyout):
    """Run context2vec WSD evaluation and convert its key output."""
    test_path = get_xml_key_pair(inf, keyin)
    tempdir = tempfile.mkdtemp(prefix="ctx2vec")
    result_path = pjoin(tempdir, "results")
    with local.cwd("systems/context2vec"), local.env(
            PIPENV_IGNORE_VIRTUALENVS="1"):
        pipenv_python(
            "context2vec/eval/wsd/test.py",
            test_path,
            "model.params",
            model,
            result_path,
        )
    python(__file__, "context2vec-key-to-unified", result_path, keyout)
def redirect(self):
    """Set up the container environment and re-run benchbuild inside it."""
    if not CFG["unionfs"]["enable"]:
        container.unpack(self.container, self.builddir)

    setup_networking()
    setup_benchbuild()
    configure_portage()
    self.configure_benchbuild(CFG)
    # Marker file signalling we are inside the container.
    path.mkfile_uchroot("/.benchbuild-container")
    benchbuild = find_benchbuild()
    with local.env(BB_VERBOSITY=str(CFG['verbosity'])):
        project_id = "{0}/{1}".format(self.name, self.group)
        run.run(benchbuild["run", "-E", self.experiment.name, project_id])
def __call__(self, binary_command, *args, **kwargs):
    """Run the next extension step with OMP_NUM_THREADS set from config."""
    from benchbuild.settings import CFG

    config = self.config
    if config is not None and 'jobs' in config.keys():
        jobs = config['jobs']
    else:
        # Fall back to the globally configured job count.
        LOG.warning("Parameter 'config' was unusable, using defaults")
        jobs = CFG["jobs"].value()

    with local.env(OMP_NUM_THREADS=str(jobs)):
        ret = self.call_next(binary_command, *args, **kwargs)
    return ret
def compile(self) -> None:
    """Compile the project."""
    libzmq_version_source = local.path(self.source_of_primary)
    cpp_compiler = bb.compiler.cxx(self)
    mkdir(libzmq_version_source / "build")
    with local.cwd(libzmq_version_source / "build"):
        with local.env(CXX=str(cpp_compiler)):
            bb.watch(cmake)("-G", "Unix Makefiles", "..")
        bb.watch(make)("-j", get_number_of_jobs(bb_cfg()))
    with local.cwd(libzmq_version_source):
        verify_binaries(self)
def test_env(self):
    """Env variables propagate to children; nesting and PATH edits work."""
    assert "PATH" in local.env
    assert "FOOBAR72" not in local.env
    with pytest.raises(ProcessExecutionError):
        local.python("-c", "import os;os.environ['FOOBAR72']")
    local.env["FOOBAR72"] = "spAm"
    assert local.python(
        "-c", "import os;print (os.environ['FOOBAR72'])"
    ).splitlines() == ["spAm"]

    with local.env(FOOBAR73=1889):
        assert local.python(
            "-c", "import os;print (os.environ['FOOBAR73'])"
        ).splitlines() == ["1889"]
        with local.env(FOOBAR73=1778):
            assert local.python(
                "-c", "import os;print (os.environ['FOOBAR73'])"
            ).splitlines() == ["1778"]
        # Inner context exit restores the outer value.
        assert local.python(
            "-c", "import os;print (os.environ['FOOBAR73'])"
        ).splitlines() == ["1889"]
    with pytest.raises(ProcessExecutionError):
        local.python("-c", "import os;os.environ['FOOBAR73']")

    # path manipulation
    with pytest.raises(CommandNotFound):
        local.which("dummy-executable")
    with local.env():
        local.env.path.insert(0, local.cwd / "not-in-path")
        p = local.which("dummy-executable")
        assert p == local.cwd / "not-in-path" / "dummy-executable"