def compile(self):
    self.download()
    download.Git(self.gdal_uri, self.gdal_dir)
    rasdaman_dir = local.path(self.src_file)
    gdal_dir = local.path(self.gdal_dir) / self.gdal_dir

    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)

    with local.cwd(gdal_dir):
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run.run(configure["--with-pic", "--enable-static",
                              "--disable-debug", "--with-gnu-ld",
                              "--without-ld-shared", "--without-libtool"])
        run.run(make["-j", CFG["jobs"]])

    with local.cwd(rasdaman_dir):
        autoreconf("-i")
        configure = local["./configure"]
        with local.env(CC=str(clang), CXX=str(clang_cxx)):
            run.run(configure["--without-debug-symbols",
                              "--enable-benchmark", "--with-static-libs",
                              "--disable-java", "--with-pic",
                              "--disable-debug", "--without-docs"])
        run.run(make["clean", "all", "-j", CFG["jobs"]])
def setup_container(builddir, _container):
    """Prepare the container and return the path where it can be found."""
    build_dir = local.path(builddir)
    in_dir = build_dir / "container-in"
    container_path = local.path(_container)
    with local.cwd(builddir):
        container_bin = container_path.basename
        container_in = in_dir / container_bin
        download.Copy(_container, container_in)
        uchrt = uchroot.no_args()

        with local.cwd("container-in"):
            uchrt = uchrt["-E", "-A", "-u", "0", "-g", "0", "-C", "-r", "/",
                          "-w", os.path.abspath("."), "--"]

        # Check, if we need erlent support for this archive.
        has_erlent = bash[
            "-c", "tar --list -f './{0}' | grep --silent '.erlent'".format(
                container_in)]
        has_erlent = (has_erlent & TF)

        # Unpack input container to: container-in
        if not has_erlent:
            cmd = local["/bin/tar"]["xf"]
            cmd = uchrt[cmd[container_bin]]
        else:
            cmd = tar["xf"]
            cmd = cmd[container_in]

        with local.cwd("container-in"):
            cmd("--exclude=dev/*")
        rm(container_in)
    return in_dir
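# Hedged usage sketch for setup_container above. Both arguments are
# illustrative (a scratch build directory and a tarball image path), not
# taken from the original source:
#
#     in_dir = setup_container("/tmp/bb-build", "/images/base.tar.bz2")
#     # in_dir now points at <builddir>/container-in with the unpacked rootfs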
def install_uchroot(_):
    """Installer for erlent (contains uchroot)."""
    builddir = local.path(str(CFG["build_dir"].value))
    with local.cwd(builddir):
        erlent_src = local.path('erlent')
        erlent_git = erlent_src / '.git'
        erlent_repo = str(CFG['uchroot']['repo'])
        erlent_build = erlent_src / 'build'
        if not erlent_git.exists():
            git("clone", erlent_repo)
        else:
            with local.cwd(erlent_src):
                git("pull", "--rebase")

        erlent_build.mkdir()
        with local.cwd(erlent_build):
            cmake("../")
            make()

    os.environ["PATH"] = os.path.pathsep.join(
        [erlent_build, os.environ["PATH"]])
    local.env.update(PATH=os.environ["PATH"])

    if not find_package("uchroot"):
        LOG.error('uchroot not found after updating PATH to %s',
                  os.environ['PATH'])
        sys.exit(-1)

    env = CFG['env'].value
    if 'PATH' not in env:
        env['PATH'] = []
    env['PATH'].append(str(erlent_build))
def __init__(self, *args, **kwargs):
    super(VoodooSub, self).__init__(*args, **kwargs)
    if args and args[0] == 'voodoo new':
        return
    config_path = '.'.join([self.parent.env, DOCKER_COMPOSE_PATH])
    if self.parent.env == 'dev':
        self.config_path = config_path
    elif local.path(config_path).is_file():
        self.config_path = config_path
    elif local.path(DOCKER_COMPOSE_PATH).is_file():
        self.config_path = DOCKER_COMPOSE_PATH
    else:
        raise_error(
            "There is no %s.%s or %s file, please add one"
            % (self.parent.env, DOCKER_COMPOSE_PATH, DOCKER_COMPOSE_PATH))
    self.main_service = self._get_main_service()
    if self.parent.env == 'dev':
        if not local.path(self.config_path).is_file():
            generate = ask(
                "There is no dev.docker-compose.yml file.\n"
                "Do you want to generate one automatically?",
                default=True)
            if generate:
                self.run_hook(GenerateDevComposeFile)
            else:
                raise_error("No dev.docker-compose.yml file, abort!")
    self.compose = compose['-f', self.config_path]
def wait_for_mount():
    logger.info("Waiting for mount")
    if local.path(mounted_mark_inner).exists():
        local.path(mounted_mark_inner).delete()
        return True
    else:
        raise NotMountedException()
def walk_rec(test_file, root):
    """Walk up from root until test_file is found or the top is reached."""
    # The first parameter is the file to look for; the original signature
    # named it cur_path, which was immediately shadowed and left test_file
    # undefined.
    cur_path = local.path(root) / test_file
    if cur_path.exists():
        return cur_path
    new_root = local.path(root) / os.pardir
    return walk_rec(test_file, new_root) if new_root != root else None
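# Illustrative call of walk_rec (the filename "setup.py" is hypothetical):
# search upward from the current directory until the file is found or the
# filesystem root is reached.
#
#     found = walk_rec("setup.py", local.cwd)
#     if found is not None:
#         print("found at", found)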
def main(self, *projects):
    """Main entry point of benchbuild run."""
    exp = [self._experiment]
    group_names = self._group_names

    benchbuild.experiments.discover()
    benchbuild.projects.discover()

    all_exps = benchbuild.experiment.ExperimentRegistry.experiments

    if self._description:
        CFG["experiment_description"] = self._description

    CFG["slurm"]["logs"] = local.path(str(CFG['build_dir'])) / str(
        CFG["slurm"]["logs"])
    CFG["build_dir"] = str(CFG["slurm"]["node_dir"])

    exps = dict(filter(lambda pair: pair[0] in set(exp), all_exps.items()))
    unknown_exps = list(
        filter(lambda name: name not in all_exps.keys(), set(exp)))
    if unknown_exps:
        print('Could not find ', str(unknown_exps),
              ' in the experiment registry.')
        sys.exit(1)

    prjs = benchbuild.project.populate(projects, group_names)
    for exp_cls in exps.values():
        exp = exp_cls(projects=prjs)
        print("Experiment: ", exp.name)
        CFG["slurm"]["node_dir"] = local.path(
            str(CFG["slurm"]["node_dir"])) / str(exp.id)
        self.__go__(prjs, exp)
def compile(self):
    self.download()
    makefile_config = local.path(self.src_file) / "config" / "make.config"
    clang = compiler.cc(self)

    with open(makefile_config, 'w') as config:
        lines = [
            "LABEL=benchbuild",
            "ENABLE_OMPSS=",
            "OMPSSC=",
            "OMPC=",
            "CC={cc}",
            "OMPSSLINK=",
            "OMPLINK={cc} -fopenmp",
            "CLINK={cc}",
            "OPT_FLAGS=",
            "CC_FLAGS=",
            "OMPC_FLAGS=",
            "OMPSSC_FLAGS=",
            "OMPC_FINAL_FLAGS=",
            "OMPSSC_FINAL_FLAG=",
            "CLINK_FLAGS=",
            "OMPLINK_FLAGS=",
            "OMPSSLINK_FLAGS=",
        ]
        lines = [l.format(cc=clang) + "\n" for l in lines]
        config.writelines(lines)

    mkdir(local.path(self.src_file) / "bin")
    with local.cwd(self.src_file):
        run.run(make["-C", self.path_dict[self.name]])
def mount_via_cron(device, volume_id):
    logger.info("Ensuring cron mount")
    mount_script = "/volumes/automount/%s" % volume_id
    mount_script_inner = "/host_root%s" % mount_script
    mounted_mark = "/volumes/automount/.mounted-%s" % volume_id
    mounted_mark_inner = "/host_root%s" % mounted_mark

    if not local.path('/host_root/volumes/automount').exists():
        mkdir('-p', "/host_root/volumes/automount")
        sh('-c', "echo '* * * * * root cd / && run-parts --report"
                 " /volumes/automount' >> /host_root/etc/crontab")

    mkdir('-p', "/host_root/volumes/%s" % volume_id)
    local.path(mount_script_inner).write(
        """#!/usr/bin/env sh
set -e
mount {device} /volumes/{volume_id}
touch {mounted_mark}
rm {mount_script}
""".format(**locals()))
    chmod('+x', "/host_root%s" % mount_script)

    @retry(wait_fixed=2000, stop_max_attempt_number=60)
    def wait_for_mount():
        logger.info("Waiting for mount")
        if local.path(mounted_mark_inner).exists():
            local.path(mounted_mark_inner).delete()
            return True
        else:
            raise NotMountedException()

    wait_for_mount()
def test_atomic_counter(self):
    local.path("counter").delete()
    num_of_procs = 20
    num_of_increments = 20

    code = """from plumbum.fs.atomic import AtomicCounterFile
import time
time.sleep(0.2)
afc = AtomicCounterFile.open("counter")
for _ in range(%s):
    print(afc.next())
    time.sleep(0.1)
""" % (num_of_increments,)

    procs = []
    for _ in range(num_of_procs):
        procs.append(local.python["-c", code].popen())
    results = []
    for p in procs:
        out, _ = p.communicate()
        self.assertEqual(p.returncode, 0)
        results.extend(int(num) for num in out.splitlines())

    self.assertEqual(len(results), num_of_procs * num_of_increments)
    self.assertEqual(len(set(results)), len(results))
    self.assertEqual(min(results), 0)
    self.assertEqual(max(results), num_of_procs * num_of_increments - 1)
    local.path("counter").delete()
def compile(self):
    self.download()
    js_dir = local.path(self.src_file) / "js" / "src"
    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)

    with local.cwd(js_dir):
        make_src_pkg = local["./make-source-package.sh"]
        with local.env(
                DIST=self.builddir,
                MOZJS_MAJOR_VERSION=0,
                MOZJS_MINOR_VERSION=0,
                MOZJS_PATCH_VERSION=0):
            make_src_pkg()

    mozjs_dir = local.path("mozjs-0.0.0")
    mozjs_src_dir = mozjs_dir / "js" / "src"

    tar("xfj", mozjs_dir + ".tar.bz2")
    with local.cwd(mozjs_src_dir):
        mkdir("obj")
        autoconf = local["autoconf-2.13"]
        autoconf()
        with local.cwd("obj"):
            with local.env(CC=str(clang), CXX=str(clang_cxx)):
                configure = local["../configure"]
                configure = configure["--without-system-zlib"]
                run.run(configure)

    mozjs_obj_dir = mozjs_src_dir / "obj"
    with local.cwd(mozjs_obj_dir):
        run.run(make["-j", str(CFG["jobs"])])
def test_atomic_file(self):
    af1 = AtomicFile("tmp.txt")
    af2 = AtomicFile("tmp.txt")
    af1.write_atomic(six.b("foo"))
    af2.write_atomic(six.b("bar"))
    self.assertEqual(af1.read_atomic(), six.b("bar"))
    self.assertEqual(af2.read_atomic(), six.b("bar"))
    local.path("tmp.txt").delete()
def run_tests(self, runner):
    crocopat = wrapping.wrap(self.run_f, self)

    programs = local.path(self.testdir) / "programs" // "*.rml"
    projects = local.path(self.testdir) / "projects" // "*.rsf"
    for program in programs:
        for _project in projects:
            runner((cat[_project] | crocopat[program]), None)
def wrap_in_union_fs_func(project, *args, **kwargs):
    """
    Wrap the func in the UnionFS mount stack.

    We make sure that the mount points all exist and stack up the
    directories for the unionfs. All directories outside of the default
    build environment are tracked for deletion.
    """
    container = project.container
    if container is None or in_container():
        return func(project, *args, **kwargs)

    build_dir = local.path(project.builddir)
    LOG.debug("UnionFS - Project builddir: %s", project.builddir)
    if __unionfs_is_active(root=build_dir):
        LOG.debug("UnionFS already active in %s, nesting not supported.",
                  build_dir)
        return func(project, *args, **kwargs)

    ro_dir = local.path(container.local)
    rw_dir = build_dir / rw
    un_dir = build_dir / union
    LOG.debug("UnionFS - RW: %s", rw_dir)

    unionfs_cmd = __unionfs_set_up(ro_dir, rw_dir, un_dir)
    project_builddir_bak = project.builddir
    project.builddir = un_dir

    proc = unionfs_cmd.popen()
    while (not __unionfs_is_active(root=un_dir)) and \
          (proc.poll() is None):
        pass

    ret = None
    if proc.poll() is None:
        try:
            with local.cwd(un_dir):
                ret = func(project, *args, **kwargs)
        finally:
            project.builddir = project_builddir_bak

            from signal import SIGINT
            is_running = proc.poll() is None
            while __unionfs_is_active(root=un_dir) and is_running:
                try:
                    proc.send_signal(SIGINT)
                    proc.wait(timeout=3)
                except subprocess.TimeoutExpired:
                    proc.kill()
                    is_running = False
            LOG.debug("Unionfs shut down.")

    if __unionfs_is_active(root=un_dir):
        raise UnmountError()

    return ret
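# Assumption: wrap_in_union_fs_func is the inner function of a decorator
# (func, rw, and union come from the enclosing scope, which is not shown
# here). A minimal sketch of the presumed outer shape:
#
#     def wrap_in_union_fs(func):
#         @functools.wraps(func)
#         def wrap_in_union_fs_func(project, *args, **kwargs):
#             ...  # body as above
#         return wrap_in_union_fs_func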
def test_relative_to(self):
    p = local.path("/var/log/messages")
    self.assertEqual(p.relative_to("/var/log/messages"), RelativePath([]))
    self.assertEqual(p.relative_to("/var/"),
                     RelativePath(["log", "messages"]))
    self.assertEqual(p.relative_to("/"),
                     RelativePath(["var", "log", "messages"]))
    self.assertEqual(p.relative_to("/var/tmp"),
                     RelativePath(["..", "log", "messages"]))
    self.assertEqual(p.relative_to("/opt"),
                     RelativePath(["..", "var", "log", "messages"]))
    self.assertEqual(p.relative_to("/opt/lib"),
                     RelativePath(["..", "..", "var", "log", "messages"]))
    for src in [local.path("/var/log/messages"), local.path("/var"),
                local.path("/opt/lib")]:
        delta = p.relative_to(src)
        self.assertEqual(src + delta, p)
def test_relative_to(self):
    p = local.path("/var/log/messages")
    assert p.relative_to("/var/log/messages") == RelativePath([])
    assert p.relative_to("/var/") == RelativePath(["log", "messages"])
    assert p.relative_to("/") == RelativePath(["var", "log", "messages"])
    assert p.relative_to("/var/tmp") == RelativePath(["..", "log", "messages"])
    assert p.relative_to("/opt") == RelativePath(["..", "var", "log", "messages"])
    assert p.relative_to("/opt/lib") == RelativePath(["..", "..", "var", "log", "messages"])
    for src in [local.path("/var/log/messages"), local.path("/var"),
                local.path("/opt/lib")]:
        delta = p.relative_to(src)
        assert src + delta == p
def clean_directories(builddir, in_dir=True, out_dir=True):
    """Remove the in and out directories of the container if confirmed by the user."""
    container_in = local.path(builddir) / "container-in"
    container_out = local.path(builddir) / "container-out"

    if in_dir and container_in.exists():
        if ui.ask("Should I delete '{0}'?".format(container_in)):
            container_in.delete()
    if out_dir and container_out.exists():
        if ui.ask("Should I delete '{0}'?".format(container_out)):
            container_out.delete()
def __init__(self, projectPath):
    assert projectPath, "No project path set"
    self.projectPath = projectPath
    self.inputContainer = local.path(local.cwd / projectPath)
    self.projectName = self.inputContainer.basename
    self.sphinxConfig = self.inputContainer / self.SPHINX_CONFIG
    self.buildPath = local.path(self.inputContainer / "__build")
    self.doctreesPath = self.buildPath / "doctrees"
    self.outputPath = self.buildPath / "html"
    log.info("[PROJECT INFO]: input from %s - html generated in %s"
             % (self.inputContainer, self.outputPath))
def run_tests(self, runner):
    binary_name = "{name}.benchbuild.serial".format(name=self.name)
    binary_path = local.path(self.src_file) / "bin" / binary_name
    exp = wrapping.wrap(binary_path, self)

    if self.name in self.input_dict:
        for test_input in self.input_dict[self.name]:
            input_file = local.path(
                self.src_file) / "inputs" / self.name / test_input
            runner(exp["-f", input_file])
    else:
        runner(exp)
def build(self, client, settings):
    with local.env(**settings):
        serial_file = os.path.join(local.env['KEY_DIR'], 'serial')
        index_file = os.path.join(local.env['KEY_DIR'], 'index.txt')
        # Do not continue without the serial file or index.txt
        if (not local.path(serial_file).exists()) or \
           (not local.path(index_file).exists()):
            sys.exit("Index.txt or serial does not exist in KEY_DIR, aborting")
        self.build_client_certs(client, settings)
        self.build_client_ovpn_file(client, settings)
        self.tarball_client_files(client, settings)
def run_tests(self, runner):
    """
    Execute LevelDB's runtime configuration.

    Args:
        runner: The experiment's run function.
    """
    leveldb = wrapping.wrap(
        local.path(self.src_file) / "out-static" / "db_bench", self)

    with local.env(LD_LIBRARY_PATH="{}:{}".format(
            local.path(self.src_file) / "out-shared",
            getenv("LD_LIBRARY_PATH", ""))):
        runner(leveldb)
def test_pid_file(self):
    code = """from __future__ import with_statement
from plumbum.fs.atomic import PidFile, PidFileTaken
try:
    with PidFile("mypid"):
        raise ValueError("this should have failed")
except PidFileTaken:
    print("already locked")
"""
    with PidFile("mypid"):
        output = local.python("-c", code)
        self.assertEqual(output.strip(), "already locked")
    local.path("mypid").delete()
def main(self):
    local.cwd.chdir(HERE)
    sys.path.insert(0, str(ROOT))
    local.env["PYTHONPATH"] = ROOT
    local.python("build_openni.py")
    local.python("build_nite.py")
    from primesense import openni2, nite2

    dist = local.path("../dist")
    dist.delete()
    dist.mkdir()

    tmp = local.path("tmp")
    tmp.delete()
    tmp.mkdir()
    copy("../primesense", tmp / "primesense")
    copy("MANIFEST.in", tmp)
    copy("../LICENSE", tmp)
    copy("../README.rst", tmp)

    ver = "%s.%s.%s.%s-%s" % (openni2.c_api.ONI_VERSION_MAJOR,
                              openni2.c_api.ONI_VERSION_MINOR,
                              openni2.c_api.ONI_VERSION_MAINTENANCE,
                              openni2.c_api.ONI_VERSION_BUILD,
                              config.get("pypi", "release"))
    data = local.path("setup_template.py").read().replace("$VERSION$", ver)
    (tmp / "setup.py").write(data)

    with local.cwd(tmp):
        if self.upload:
            # copy pypirc to ~
            orig = local.path("~/.pypirc")
            restore = False
            if orig.exists():
                copy(orig, "~/.pypirc-openni-wrapper")
                restore = True
            copy(ROOT / "_pypirc", "~/.pypirc")
            try:
                local.python["setup.py", "sdist", "--formats=zip,gztar",
                             None if self.dont_register else "register",
                             "upload"] & FG
            finally:
                if restore:
                    copy("~/.pypirc-openni-wrapper", orig)
        else:
            local.python["setup.py", "sdist", "--formats=zip,gztar"] & FG

    for fn in tmp / "dist" // "*":
        fn.move(dist)
def revoke_client(client, settings):
    # Do not continue without the serial file or index.txt
    with local.env(**settings):
        serial_file = os.path.join(local.env['KEY_DIR'], 'serial')
        index_file = os.path.join(local.env['KEY_DIR'], 'index.txt')
        client_hash = parse_index_file(index_file)
        if (not local.path(serial_file).exists()) or \
           (not local.path(index_file).exists()):
            sys.exit("Index.txt or serial does not exist in KEY_DIR, aborting")
        if client_hash.get(client, {}).get('is_valid'):
            revoke_full = local[os.path.join(local.env['EASY_RSA'],
                                             'revoke-full')]
            revoke_full.run(client, retcode=2, stderr=sys.stdout)
            rm.run(os.path.join(local.env['KEY_DIR'], 'revoke-test.pem'),
                   retcode=0)
def test_atomic_counter2(self):
    local.path("counter").delete()
    afc = AtomicCounterFile.open("counter")
    self.assertEqual(afc.next(), 0)
    self.assertEqual(afc.next(), 1)
    self.assertEqual(afc.next(), 2)

    self.assertRaises(TypeError, afc.reset, "hello")

    afc.reset(70)
    self.assertEqual(afc.next(), 70)
    self.assertEqual(afc.next(), 71)
    self.assertEqual(afc.next(), 72)
    local.path("counter").delete()
def compile(self):
    package_atom = "{domain}/{name}".format(
        domain=self.domain, name=self.name)

    LOG.debug('Installing dependencies.')
    emerge(package_atom, '--onlydeps', env=self.emerge_env)
    c_compiler = local.path(str(compiler.cc(self)))
    cxx_compiler = local.path(str(compiler.cxx(self)))
    setup_compilers('/etc/portage/make.conf')
    ln("-sf", str(c_compiler), local.path('/') / c_compiler.basename)
    ln('-sf', str(cxx_compiler), local.path('/') / cxx_compiler.basename)

    LOG.debug('Installing %s.', package_atom)
    emerge(package_atom, env=self.emerge_env)
def test_local(self):
    self.assertTrue("plumbum" in str(local.cwd))
    self.assertTrue("PATH" in local.env.getdict())
    self.assertEqual(local.path("foo"), os.path.join(os.getcwd(), "foo"))
    local.which("ls")
    local["ls"]
    self.assertEqual(local.python("-c", "print ('hi there')").splitlines(),
                     ["hi there"])
def test_atomic_counter2(self):
    local.path("counter").delete()
    afc = AtomicCounterFile.open("counter")
    assert afc.next() == 0
    assert afc.next() == 1
    assert afc.next() == 2

    with pytest.raises(TypeError):
        afc.reset("hello")

    afc.reset(70)
    assert afc.next() == 70
    assert afc.next() == 71
    assert afc.next() == 72
    local.path("counter").delete()
def get_version_from_cache_dir(src_file):
    """
    Create a version for a project out of a directory hash.

    The hash is taken from the directory of the source file.

    Args:
        src_file: The source file of the project using this function.

    Returns:
        The first 7 digits of the hash as a string, the entire hash as a
        string if it has 7 or fewer digits, or None if the path is
        incorrect.
    """
    if src_file is None:
        return None

    tmp_dir = local.path(str(CFG["tmp_dir"]))
    if tmp_dir.exists():
        cache_file = tmp_dir / src_file
        dir_hash = get_hash_of_dirs(cache_file)
        if dir_hash is None:
            return None
        if len(str(dir_hash)) <= 7:
            return str(dir_hash)
        return str(dir_hash)[:7]
    return None
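# Hedged usage sketch (the archive name is illustrative): derive a short
# version string from the content hash of the cached source directory.
#
#     version = get_version_from_cache_dir("project-2.0.tar.gz")
#     # e.g. "a1b2c3d" (first 7 characters of the hash), or None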
def _generate_links(self, config):
    """Link modules defined in repos.yml/yaml in the modules folder."""
    # safe_load avoids the unsafe-by-default behavior of yaml.load, which
    # also requires an explicit Loader in current PyYAML versions.
    spec = yaml.safe_load(open(config).read())
    dest_path = local.path(LINK_FOLDER)
    for key, repo in spec.items():
        modules = repo.pop('modules', [])
        self._set_links(key, modules, dest_path)
def main():
    cmd, partial_word, prev_word = argv[1:]
    line = local.env['COMP_LINE']
    suggestions = []
    words = shlex.split(line)
    subcmd = get_subcmd(words)
    suggestions.extend(opt for opt in (*opts[subcmd], *flags[subcmd])
                       if opt.startswith(partial_word))
    if prev_word in (*opts[subcmd], *hlp):
        suggestions.clear()
    elif subcmd == 'essex':
        suggestions.extend(sc for sc in subcommands
                           if sc.startswith(partial_word))
    elif subcmd == 'sig' and prev_word == 'sig':
        suggestions.extend(sig for sig in signals
                           if sig.startswith(partial_word))
    elif subcmd not in ('list', 'new', 'off', 'on', 'tree'):
        suggestions.extend(svc.name for svc in get_svcs(words)
                           if svc.name.startswith(partial_word))
    if subcmd == 'new' and prev_word in ('-u', '--as-user'):
        suggestions.extend(
            line.split(':')[0]
            for line in local.path('/etc/passwd').read().splitlines())
    print('\n'.join(suggestions))
def compile(self):
    self.download()
    download.Wget(self.boost_src_uri, self.boost_src_file)
    tar("xfj", self.boost_src_file)

    cp("-ar", local.path(self.testdir) / "cfg", '.')
    cp("-ar", local.path(self.testdir) / "etc", '.')
    cp("-ar", local.path(self.testdir) / "scenes", '.')
    cp("-ar", local.path(self.testdir) / "share", '.')
    cp("-ar", local.path(self.testdir) / "test", '.')

    clang = compiler.cc(self)
    clang_cxx = compiler.cxx(self)

    # First we have to prepare boost for lady povray...
    boost_prefix = "boost-install"
    with local.cwd(self.boost_src_dir):
        mkdir(boost_prefix)
        bootstrap = local["./bootstrap.sh"]
        run.run(bootstrap["--with-toolset=clang",
                          "--prefix=\"{0}\"".format(boost_prefix)])
        _b2 = local["./b2"]
        run.run(_b2["--ignore-site-config", "variant=release", "link=static",
                    "threading=multi", "optimization=speed", "install"])

    src_file = local.path(self.src_file)
    with local.cwd(src_file):
        with local.cwd("unix"):
            sh("prebuild.sh")

        configure = local["./configure"]
        with local.env(COMPILED_BY="BB <*****@*****.**>",
                       CC=str(clang), CXX=str(clang_cxx)):
            run.run(configure["--with-boost=" + boost_prefix])
        run.run(make["all"])
def main(self):
    with tempfile.TemporaryDirectory() as tmpdir:
        tmpdir = local.path(tmpdir)
        b0masked = tmpdir / "b0masked.nii.gz"  # Sylvain wants both
        b0maskedbrain = tmpdir / "b0maskedbrain.nii.gz"

        t2masked = tmpdir / 't2masked.nii.gz'
        print('Masking the T2')
        ImageMath(3, t2masked, 'm', self.t2, self.t2mask)

        brain = tmpdir / "brain.nii.gz"
        wmparc = tmpdir / "wmparc.nii.gz"

        brainmgz = self.parent.fsdir / 'mri/brain.mgz'
        wmparcmgz = self.parent.fsdir / 'mri/wmparc.mgz'

        wmparcindwi = tmpdir / 'wmparcInDwi.nii.gz'  # Sylvain wants both
        wmparcinbrain = tmpdir / 'wmparcInBrain.nii.gz'

        print("Making brain.nii.gz and wmparc.nii.gz from their mgz versions")

        vol2vol = local[self.parent.fshome / 'bin/mri_vol2vol']
        label2vol = local[self.parent.fshome / 'bin/mri_label2vol']

        with local.env(SUBJECTS_DIR=''):
            vol2vol('--mov', brainmgz, '--targ', brainmgz, '--regheader',
                    '--o', brain)
            label2vol('--seg', wmparcmgz, '--temp', brainmgz, '--regheader',
                      wmparcmgz, '--o', wmparc)

        print('Extracting B0 from DWI and masking it')
        check_call((' ').join([
            pjoin(FILEDIR, 'bse.py'), '-i', self.parent.dwi, '-m',
            self.parent.dwimask, '-o', b0masked
        ]), shell=True)
        print('Made masked B0')

        # rigid registration from t2 to brain.nii.gz
        pre = tmpdir / 'BrainToT2'
        BrainToT2Affine = pre + '0GenericAffine.mat'

        print('Computing rigid registration from brain.nii.gz to t2')
        rigid_registration(3, brain, t2masked, pre)
        # generates three files for rigid registration:
        #   pre0GenericAffine.mat preInverseWarped.nii.gz preWarped.nii.gz
        # generates five files for default (rigid+affine+deformable syn)
        # registration:
        #   pre0GenericAffine.mat pre1Warp.nii.gz preWarped.nii.gz
        #   pre1InverseWarp.nii.gz preInverseWarped.nii.gz

        dwi_res = load_nifti(
            str(b0masked)).header['pixdim'][1:4].round(decimals=2)
        brain_res = load_nifti(
            str(brain)).header['pixdim'][1:4].round(decimals=2)
        print(f'DWI resolution: {dwi_res}')
        print(f'FreeSurfer brain resolution: {brain_res}')

        print('Registering wmparc to B0 through T2')
        registerFs2Dwi_T2(tmpdir, 'fsbrainToT2ToB0', b0masked, t2masked,
                          BrainToT2Affine, wmparc, wmparcindwi)

        if (dwi_res != brain_res).any():
            print('DWI resolution is different from FreeSurfer brain resolution')
            print('wmparc will be registered to both DWI and brain resolution')
            print('Check output files wmparcInDwi.nii.gz and wmparcInBrain.nii.gz')

            print('Resampling B0 to brain resolution')
            ResampleImageBySpacing('3', b0masked, b0maskedbrain,
                                   brain_res.tolist())

            print('Registering wmparc to resampled B0')
            registerFs2Dwi_T2(tmpdir, 'fsbrainToT2ToResampledB0',
                              b0maskedbrain, t2masked, BrainToT2Affine,
                              wmparc, wmparcinbrain)

        # copying images to outDir
        b0masked.copy(self.parent.out)
        wmparcindwi.copy(self.parent.out)

        if b0maskedbrain.exists():
            b0maskedbrain.copy(self.parent.out)
            wmparcinbrain.copy(self.parent.out)

        if self.parent.debug:
            tmpdir.copy(self.parent.out, 'fs2dwi-debug-' + str(os.getpid()))

    print('See output files in ', self.parent.out._path)
def main(self):
    # if self.force:
    #     logging.info('Deleting previous output directory')
    #     rm('-rf', self.outDir)

    temp = self.dwi_file.split(',')
    primaryVol = abspath(temp[0])
    if len(temp) < 2:
        raise AttributeError('Two volumes are required for --imain')
    else:
        secondaryVol = abspath(temp[1])

    if self.b0_brain_mask:
        temp = self.b0_brain_mask.split(',')
        primaryMask = abspath(temp[0])
        if len(temp) == 2:
            secondaryMask = abspath(temp[1])
        else:
            secondaryMask = abspath(temp[0])
    else:
        primaryMask = []
        secondaryMask = []

    # obtain 4D/3D info and time axis info
    dimension = load_nifti(primaryVol).header['dim']
    dim1 = dimension[0]
    if dim1 != 4:
        raise AttributeError('primary volume must be 4D, however, '
                             'secondary can be 3D/4D')
    numVol1 = dimension[4]

    dimension = load_nifti(secondaryVol).header['dim']
    dim2 = dimension[0]
    numVol2 = dimension[4]

    temp = self.bvals_file.split(',')
    if len(temp) >= 1:
        primaryBval = abspath(temp[0])
    if len(temp) == 2:
        secondaryBval = abspath(temp[1])
    elif len(temp) == 1 and dim2 == 4:
        secondaryBval = primaryBval
    elif len(temp) == 1 and dim2 == 3:
        secondaryBval = []
    elif len(temp) == 0:
        raise AttributeError('--bvals are required')

    temp = self.bvecs_file.split(',')
    if len(temp) >= 1:
        primaryBvec = abspath(temp[0])
    if len(temp) == 2:
        secondaryBvec = abspath(temp[1])
    elif len(temp) == 1 and dim2 == 4:
        secondaryBvec = primaryBvec
    elif len(temp) == 1 and dim2 == 3:
        secondaryBvec = []
    else:
        raise AttributeError('--bvecs are required')

    with TemporaryDirectory() as tmpdir:
        tmpdir = local.path(tmpdir)

        # mask both volumes, fslmaths can do that irrespective of dimension
        logging.info('Masking the volumes')

        primaryMaskedVol = tmpdir / 'primaryMasked.nii.gz'
        secondaryMaskedVol = tmpdir / 'secondaryMasked.nii.gz'

        if primaryMask:
            # mask the volume
            fslmaths[primaryVol, '-mas', primaryMask, primaryMaskedVol] & FG
        else:
            primaryMaskedVol = primaryVol

        if secondaryMask:
            # mask the volume
            fslmaths[secondaryVol, '-mas', secondaryMask,
                     secondaryMaskedVol] & FG
        else:
            secondaryMaskedVol = secondaryVol

        logging.info('Extracting B0 from masked volumes')
        B0_PA = tmpdir / 'B0_PA.nii.gz'
        B0_AP = tmpdir / 'B0_AP.nii.gz'

        obtainB0(primaryMaskedVol, primaryBval, B0_PA, self.num_b0)

        if dim2 == 4:
            obtainB0(secondaryMaskedVol, secondaryBval, B0_AP, self.num_b0)
        else:
            B0_AP = secondaryMaskedVol

        B0_PA_AP_merged = tmpdir / 'B0_PA_AP_merged.nii.gz'
        with open(self.acqparams_file._path) as f:
            acqp = f.read().split('\n')

        logging.info('Writing acqparams.txt for topup')

        # firstDim: first acqp line should be replicated this number of times
        firstB0dim = load_nifti(str(B0_PA)).header['dim'][4]
        # secondDim: second acqp line should be replicated this number of times
        secondB0dim = load_nifti(str(B0_AP)).header['dim'][4]
        acqp_topup = tmpdir / 'acqp_topup.txt'
        with open(acqp_topup, 'w') as f:
            for i in range(firstB0dim):
                f.write(acqp[0] + '\n')
            for i in range(secondB0dim):
                f.write(acqp[1] + '\n')

        logging.info('Merging B0_PA and B0_AP')
        fslmerge('-t', B0_PA_AP_merged, B0_PA, B0_AP)

        topup_params, applytopup_params, eddy_openmp_params = \
            obtain_fsl_eddy_params(self.eddy_config_file._path)

        # Example for topup
        # === on merged b0 images ===
        # https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/eddy/UsersGuide#Running_topup_on_the_b.3D0_volumes
        # topup --imain=both_b0 --datain=my_acq_param.txt --out=my_topup_results
        # === on all b0 images ===
        # https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/topup/TopupUsersGuide#Running_topup
        # topup --imain=all_my_b0_images.nii --datain=acquisition_parameters.txt --config=b02b0.cnf --out=my_output

        logging.info('Running topup')
        topup_results = tmpdir / 'topup_results'
        topup[f'--imain={B0_PA_AP_merged}',
              f'--datain={acqp_topup}',
              f'--out={topup_results}',
              '--verbose',
              topup_params.split()] & FG

        logging.info('Running applytopup')
        topupMask = tmpdir / 'topup_mask.nii.gz'

        # applytopup on primary4D, secondary4D/3D
        topupOut = tmpdir / 'topup_out.nii.gz'
        if dim2 == 4:
            applytopup[f'--imain={primaryMaskedVol},{secondaryMaskedVol}',
                       f'--datain={self.acqparams_file}',
                       '--inindex=1,2',
                       f'--topup={topup_results}',
                       f'--out={topupOut}',
                       '--verbose',
                       applytopup_params.split()] & FG
        else:
            applytopup[f'--imain={B0_PA},{B0_AP}',
                       f'--datain={self.acqparams_file}',
                       '--inindex=1,2',
                       f'--topup={topup_results}',
                       f'--out={topupOut}',
                       '--verbose',
                       applytopup_params.split()] & FG

        topupOutMean = tmpdir / 'topup_out_mean.nii.gz'
        fslmaths[topupOut, '-Tmean', topupOutMean] & FG
        bet[topupOutMean, topupMask._path.split('_mask.nii.gz')[0],
            '-m', '-n'] & FG

        # another approach could be: threshold the mean of the primary and
        # secondary masks at 0.5 to obtain a modified mask, and use that
        # mask for eddy_openmp
        # fslmerge[topupMask, '-t', primaryMask, secondaryMask] & FG
        # fslmaths[topupMask, '-Tmean', topupMask] & FG
        # fslmaths[topupMask, '-thr', '0.5', topupMask, '-odt', 'char'] & FG

        logging.info('Writing index.txt for topup')
        indexFile = tmpdir / 'index.txt'
        with open(indexFile, 'w') as f:
            for i in range(numVol1):
                f.write('1\n')

        outPrefix = tmpdir / basename(primaryVol).split('.')[0] + '_Ep_Ed'

        temp = self.whichVol.split(',')
        if len(temp) == 1 and temp[0] == '1':
            # correct only primary4D volume
            eddy_openmp[f'--imain={primaryMaskedVol}',
                        f'--mask={topupMask}',
                        f'--acqp={self.acqparams_file}',
                        f'--index={indexFile}',
                        f'--bvecs={primaryBvec}',
                        f'--bvals={primaryBval}',
                        f'--out={outPrefix}',
                        f'--topup={topup_results}',
                        '--verbose',
                        eddy_openmp_params.split()] & FG

        elif len(temp) == 2 and temp[1] == '2':
            # Sylvain would like to correct both primary and secondary volumes
            with open(indexFile, 'a') as f:
                for i in range(numVol2):
                    f.write('2\n')

            # join both bval files
            bvals1 = read_bvals(primaryBval)
            if dim2 == 4 and not secondaryBval:
                bvals2 = bvals1.copy()
            elif dim2 == 4 and secondaryBval:
                bvals2 = read_bvals(secondaryBval)
            elif dim2 == 3:
                bvals2 = [0]

            combinedBvals = tmpdir / 'combinedBvals.txt'
            write_bvals(combinedBvals, bvals1 + bvals2)

            # join both bvec files
            bvecs1 = read_bvecs(primaryBvec)
            if dim2 == 4 and not secondaryBvec:
                bvecs2 = bvecs1.copy()
            elif dim2 == 4 and secondaryBvec:
                bvecs2 = read_bvecs(secondaryBvec)
            elif dim2 == 3:
                bvecs2 = [[0, 0, 0]]

            combinedBvecs = tmpdir / 'combinedBvecs.txt'
            write_bvecs(combinedBvecs, bvecs1 + bvecs2)

            combinedData = tmpdir / 'combinedData.nii.gz'
            fslmerge('-t', combinedData, primaryMaskedVol, secondaryMaskedVol)

            eddy_openmp[f'--imain={combinedData}',
                        f'--mask={topupMask}',
                        f'--acqp={self.acqparams_file}',
                        f'--index={indexFile}',
                        f'--bvecs={combinedBvecs}',
                        f'--bvals={combinedBvals}',
                        f'--out={outPrefix}',
                        f'--topup={topup_results}',
                        '--verbose',
                        eddy_openmp_params.split()] & FG

        else:
            raise ValueError('Invalid --whichVol')

        # copy bval,bvec to have the same prefix as the eddy-corrected volume
        copyfile(outPrefix + '.eddy_rotated_bvecs', outPrefix + '.bvec')
        copyfile(primaryBval, outPrefix + '.bval')

        # rename topupMask to have the same prefix as the eddy-corrected volume
        topupMask.move(outPrefix + '_mask.nii.gz')

        tmpdir.move(self.outDir)
def main(self):
    with TemporaryDirectory() as tmpdir, local.cwd(tmpdir):
        tmpdir = local.path(tmpdir)

        dicePrefix = 'dwi'

        logging.info("Dice DWI")
        unu['dice', '-a', '3', '-i', self.dwi, '-o', dicePrefix] & FG

        logging.info("Apply warp to each DWI volume")
        vols = sorted(tmpdir // (dicePrefix + '*'))

        # use the following multi-processed loop
        pool = Pool(int(self.nproc))
        res = []
        for vol in vols:
            res.append(pool.apply_async(_WarpImage,
                                        (self.dwimask, vol, self.xfm)))
        volsWarped = [r.get() for r in res]
        pool.close()
        pool.join()

        # or use the following for loop
        # volsWarped = []
        # for vol in vols:
        #     if self.dwimask:
        #         unu('3op', 'ifelse', self.dwimask, vol, '0', '-o', vol)
        #     volwarped = vol.stem + '-warped.nrrd'
        #     WarpImageMultiTransform('3', vol, volwarped, '-R', vol,
        #                             self.xfm)
        #     volsWarped.append(volwarped)

        logging.info("Join warped volumes together")
        (unu['join', '-a', '3', '-i', volsWarped] |
         unu['save', '-e', 'gzip', '-f', 'nrrd', '-o', 'dwi.nhdr']) & FG

        # get data type
        with open("dwi.nhdr", "r") as hdr:
            lines = hdr.readlines()
            for line in lines:
                if 'type' in line:
                    typeline = line

        logging.info(
            "Create new nrrd header pointing to the newly generated data file")
        unu('save', '-e', 'gzip', '-f', 'nrrd', '-i', self.dwi,
            '-o', 'tmpdwi.nhdr')

        # get other header fields
        with open("tmpdwi.nhdr", "r") as hdr:
            lines = hdr.readlines()

        with open("dwi.nhdr", "w") as hdr:
            for line in lines:
                if 'data file' in line:
                    hdr.write('data file: dwi.raw.gz\n')
                elif 'type' in line:
                    hdr.write(typeline)
                else:
                    hdr.write(line)

        logging.info('Make ' + str(self.out))
        unu('save', '-e', 'gzip', '-f', 'nrrd', '-i', 'dwi.nhdr',
            '-o', self.out)
        logging.info('Made ' + str(self.out))

        if self.debug:
            from os import getpid
            pid = str(getpid())
            d = local.path(self.out.dirname /
                           ('antsApplyTransformsDWi-' + pid))
            tmpdir.copy(d)
def _before():
    nonlocal test_folder
    if test_folder is None:
        test_folder = integration_before()
    test_folder = local.path(test_folder)
def _update_config_file(self):
    super(GenerateDevComposeFile, self)._update_config_file()
    path = local.path('~/.voodoo/shared/maintainer_quality_tools')._path
    self.config['services']['odoo']['environment'].append(
        "MAINTAINER_QUALITY_TOOLS=%s" % path)
args = parser.parse_args()
num_jobs = multiprocessing.cpu_count()

assert os.path.isfile(COMPILE_COMMANDS), "Could not find {}".format(
    COMPILE_COMMANDS)

with open(COMPILE_COMMANDS, 'r') as cc_json:
    transpile_files(cc_json, filter=args.filter,
                    emit_build_files=False, verbose=True)

# Move and rename tmux.rs to main.rs
move(TMUX_RS, MAIN_RS)

plumbum_rs_glob = local.path(TMUX_REPO) // "*.rs"
plumbum_compat_rs_glob = local.path(TMUX_COMPAT_DIR) // "*.rs"

# Move source files to src directory
move(plumbum_rs_glob, RUST_SRC_DIR)

# Move compat files to src/compat directory
# (capture stderr here; the assert message below needs it)
retcode, _, stderr = move(plumbum_compat_rs_glob, RUST_COMPAT_DIR)

assert retcode != 1, "Could not move translated rs files:\n{}".format(stderr)

# Rename files with dashes to underscores, as rust won't
# accept dashes.
for path in [RUST_SRC_DIR, RUST_COMPAT_DIR]:
    rust_rs_files = local.path(path) // "*.rs"
def download_VNF_image():
    with pushd(case_path):
        one_queue_image = get_env("one_queue_image")
        two_queue_image = get_env("two_queue_image")

        if not os.path.exists(f"{case_path}/{one_queue_image}"):
            log_info = """
            ***********************************************************************
            Downloading and decompressing VNF image. This may take a while!
            ***********************************************************************
            """
            log(log_info)
            one_queue_zip = get_env("one_queue_zip")
            cmd = f"""
            wget http://netqe-bj.usersys.redhat.com/share/tli/vsperf_img/{one_queue_image} > /dev/null 2>&1
            #wget people.redhat.com/ctrautma/{one_queue_zip} > /dev/null 2>&1
            #lrzip -d {one_queue_zip}
            #rm -f {one_queue_zip}
            """
            log_and_run(cmd)

        if not os.path.exists(f"{case_path}/{two_queue_image}"):
            log_info = """
            ***********************************************************************
            Downloading and decompressing VNF image. This may take a while!
            ***********************************************************************
            """
            log(log_info)
            two_queue_zip = get_env("two_queue_zip")
            cmd = f"""
            wget http://netqe-bj.usersys.redhat.com/share/tli/vsperf_img/{two_queue_image} > /dev/null 2>&1
            # wget people.redhat.com/ctrautma/{two_queue_zip} > /dev/null 2>&1
            # lrzip -d {two_queue_zip}
            # rm -f {two_queue_zip}
            """
            log_and_run(cmd)

        udev_file = "60-persistent-net.rules"
        data = """
ACTION=="add", SUBSYSTEM=="net", KERNELS=="0000:03:00.0", NAME:="eth1"
ACTION=="add", SUBSYSTEM=="net", KERNELS=="0000:04:00.0", NAME:="eth2"
"""
        log("add net rules to guest image")
        log(data)
        local.path(udev_file).write(data)

        cmd = f"""
        virt-copy-in -a {case_path}/{one_queue_image} {udev_file} /etc/udev/rules.d/
        virt-copy-in -a {case_path}/{two_queue_image} {udev_file} /etc/udev/rules.d/
        """
        log_and_run(cmd)

        dpdk_url = get_env("dpdk_url")
        dpdk_tool_url = get_env("dpdk_tool_url")
        dpdk_ver = get_env("dpdk_ver")
        cmd = f"""
        rm -rf /root/{dpdk_ver}
        mkdir -p /root/{dpdk_ver}
        wget -P /root/{dpdk_ver}/ {dpdk_url} > /dev/null 2>&1
        wget -P /root/{dpdk_ver}/ {dpdk_tool_url} > /dev/null 2>&1
        virt-copy-in -a {case_path}/{one_queue_image} /root/{dpdk_ver} /root/
        virt-copy-in -a {case_path}/{two_queue_image} /root/{dpdk_ver} /root/
        sleep 5
        """
        log_and_run(cmd)

    return 0
def test_stem(self):
    assert self.longpath.stem == "file"
    p = local.path("/some/directory")
    assert p.stem == "directory"
def test_iterdir(self):
    cwd = local.path('.')
    files = list(cwd.iterdir())
    assert cwd / 'test_local.py' in files
    assert cwd / 'test_remote.py' in files
def test_split(self):
    p = local.path("/var/log/messages")
    self.assertEqual(p.split(), ["var", "log", "messages"])
# Add option to use the debug version of `c2rust`
config.update_args(args)

assert os.path.isfile(COMPILE_COMMANDS), "Could not find {}".format(
    COMPILE_COMMANDS)

print(Colors.OKBLUE + "Transpiling..." + Colors.NO_COLOR)
transpile(COMPILE_COMMANDS,
          emit_build_files=False,
          reorganize_definitions=False,  # TODO
          extra_transpiler_args=["--reduce-type-annotations"])

# Create the src dir if it doesn't already exist
mkdir["-p", RUST_SRC_DIR].run()

# Move and rename TCC.rs to main.rs
move(TCC_RS, MAIN_RS)

plumbum_rs_glob = local.path(TCC_REPO) // "*.rs"

# Move source files to src directory
# (capture stderr here; the assert message below needs it)
retcode, _, stderr = move(plumbum_rs_glob, RUST_SRC_DIR)

assert retcode != 1, "Could not move translated rs files:\n{}".format(stderr)

# main.rs needs to know about modules so we add them here
add_mods(MAIN_RS)

print(Colors.OKGREEN + "Done!" + Colors.NO_COLOR)
# This is used to test the validity of the computations of the tf matrix,
# idf matrix, the base data structure and the semantic similarity
# calculation. It uses ../test/small_file.txt to compute tests.
import math
import sys
import os

from plumbum import local

sys.path.append(local.path(__file__).dirname.up())

from create_analysis import initApp
from convenience import makeWordToDocDataStructure, whitespace
from tf_matrix import makeTfMatrix
from idf_matrix import makeIdfMatrix
from compute_semantic_similarity import multTfIdf, computeSemanticSimilarity

dataLink = os.path.join(
    local.path(__file__).dirname, '..', 'test', 'small_file.txt')
sc = initApp('local', 'test')


def testConvDataStructure():
    convDataStructure = makeWordToDocDataStructure(sc, dataLink)
    print('Test for makeWordToDocDataStructure')
    testVal = [('i', {
        0: 2,
        1: 1
    }), ('like', {
        0: 1
    }), ('data', {
        0: 1,
def test_notouch(self):
    conf = ConfigINI(fname)
    assert not local.path(fname).exists()
def test_newname(self):
    p1 = self.longpath
    p2 = local.path("file.tar.gz")
    assert p1.with_name("something.tar") == local.path(
        "/some/long/path/to/something.tar")
    assert p2.with_name("something.tar") == local.path("something.tar")
def main(self):
    with tempfile.TemporaryDirectory() as tmpdir:
        tmpdir = local.path(tmpdir)
        b0masked = tmpdir / "b0masked.nii.gz"  # Sylvain wants both
        b0maskedbrain = tmpdir / "b0maskedbrain.nii.gz"

        brain = tmpdir / "brain.nii.gz"
        wmparc = tmpdir / "wmparc.nii.gz"

        brainmgz = self.parent.fsdir / 'mri/brain.mgz'
        wmparcmgz = self.parent.fsdir / 'mri/wmparc.mgz'

        wmparcindwi = tmpdir / 'wmparcInDwi.nii.gz'  # Sylvain wants both
        wmparcinbrain = tmpdir / 'wmparcInBrain.nii.gz'

        print("Making brain.nii.gz and wmparc.nii.gz from their mgz versions")

        vol2vol = local[self.parent.fshome / 'bin/mri_vol2vol']
        label2vol = local[self.parent.fshome / 'bin/mri_label2vol']

        with local.env(SUBJECTS_DIR=''):
            vol2vol('--mov', brainmgz, '--targ', brainmgz, '--regheader',
                    '--o', brain)
            label2vol('--seg', wmparcmgz, '--temp', brainmgz, '--regheader',
                      wmparcmgz, '--o', wmparc)

        print('Extracting B0 from DWI and masking it')
        check_call((' ').join([
            pjoin(FILEDIR, 'bse.py'), '-i', self.parent.dwi, '-m',
            self.parent.dwimask, '-o', b0masked
        ]), shell=True)
        print('Made masked B0')

        dwi_res = load_nifti(
            str(b0masked)).header['pixdim'][1:4].round(decimals=2)
        brain_res = load_nifti(
            str(brain)).header['pixdim'][1:4].round(decimals=2)
        print(f'DWI resolution: {dwi_res}')
        print(f'FreeSurfer brain resolution: {brain_res}')

        print('Registering wmparc to B0')
        registerFs2Dwi(tmpdir, 'fsbrainToB0', b0masked, brain, wmparc,
                       wmparcindwi)

        if (dwi_res != brain_res).any():
            print('DWI resolution is different from FreeSurfer brain resolution')
            print('wmparc will be registered to both DWI and brain resolution')
            print('Check output files wmparcInDwi.nii.gz and wmparcInBrain.nii.gz')

            print('Resampling B0 to brain resolution')
            ResampleImageBySpacing('3', b0masked, b0maskedbrain,
                                   brain_res.tolist())

            print('Registering wmparc to resampled B0')
            registerFs2Dwi(tmpdir, 'fsbrainToResampledB0', b0maskedbrain,
                           brain, wmparc, wmparcinbrain)

        # copying images to outDir
        b0masked.copy(self.parent.out)
        wmparcindwi.copy(self.parent.out)

        if b0maskedbrain.exists():
            b0maskedbrain.copy(self.parent.out)
            wmparcinbrain.copy(self.parent.out)

        if self.parent.debug:
            tmpdir.copy(self.parent.out, 'fs2dwi-debug-' + str(os.getpid()))

    print('See output files in ', self.parent.out._path)
def test_getpath(self):
    assert local.cwd.getpath() == local.path('.')
def attch_sriov_vf_to_vm(xml_file, vm, vlan_id=0):
    vf1_bus_info = my_tool.get_bus_from_name(get_env("NIC1_VF"))
    vf2_bus_info = my_tool.get_bus_from_name(get_env("NIC2_VF"))

    vf1_bus_info = vf1_bus_info.replace(":", '_')
    vf1_bus_info = vf1_bus_info.replace(".", '_')
    vf2_bus_info = vf2_bus_info.replace(":", '_')
    vf2_bus_info = vf2_bus_info.replace(".", '_')

    log(vf1_bus_info)
    log(vf2_bus_info)

    vf1_domain = vf1_bus_info.split('_')[0]
    vf1_bus = vf1_bus_info.split('_')[1]
    vf1_slot = vf1_bus_info.split('_')[2]
    vf1_func = vf1_bus_info.split('_')[3]

    vf2_domain = vf2_bus_info.split('_')[0]
    vf2_bus = vf2_bus_info.split('_')[1]
    vf2_slot = vf2_bus_info.split('_')[2]
    vf2_func = vf2_bus_info.split('_')[3]

    vlan_item = """
    <interface type='hostdev' managed='yes'>
        <mac address='{}'/>
        <vlan>
            <tag id='{}'/>
        </vlan>
        <driver name='vfio'/>
        <source>
            <address type='pci' domain='0x{}' bus='0x{}' slot='0x{}' function='0x{}'/>
        </source>
        <address type='pci' domain='{}' bus='{}' slot='{}' function='{}'/>
    </interface>
    """

    item = """
    <interface type='hostdev' managed='yes'>
        <mac address='{}'/>
        <driver name='vfio'/>
        <source>
            <address type='pci' domain='0x{}' bus='0x{}' slot='0x{}' function='0x{}'/>
        </source>
        <address type='pci' domain='{}' bus='{}' slot='{}' function='{}'/>
    </interface>
    """

    vf1_xml_path = os.getcwd() + "/vf1.xml"
    vf2_xml_path = os.getcwd() + "/vf2.xml"

    if os.path.exists(vf1_xml_path):
        os.remove(vf1_xml_path)
    if os.path.exists(vf2_xml_path):
        os.remove(vf2_xml_path)

    local.path(vf1_xml_path).touch()
    local.path(vf2_xml_path).touch()
    vf1_f_obj = local.path(vf1_xml_path)
    vf2_f_obj = local.path(vf2_xml_path)

    import xml.etree.ElementTree as xml

    if vlan_id != 0:
        vf1_format_list = [
            '52:54:00:11:8f:ea', vlan_id, vf1_domain, vf1_bus, vf1_slot,
            vf1_func, '0x0000', '0x03', '0x0', '0x0'
        ]
        vf1_vlan_item = vlan_item.format(*vf1_format_list)
        vf1_vlan_obj = xml.fromstring(vf1_vlan_item)
        vf1_f_obj.write(xml.tostring(vf1_vlan_obj))

        vf2_format_list = [
            '52:54:00:11:8f:eb', vlan_id, vf2_domain, vf2_bus, vf2_slot,
            vf2_func, '0x0000', '0x04', '0x0', '0x0'
        ]
        vf2_vlan_item = vlan_item.format(*vf2_format_list)
        vf2_vlan_obj = xml.fromstring(vf2_vlan_item)
        vf2_f_obj.write(xml.tostring(vf2_vlan_obj))
    else:
        vf1_format_list = [
            '52:54:00:11:8f:ea', vf1_domain, vf1_bus, vf1_slot, vf1_func,
            '0x0000', '0x03', '0x0', '0x0'
        ]
        vf1_novlan_item = item.format(*vf1_format_list)
        vf1_novlan_obj = xml.fromstring(vf1_novlan_item)
        vf1_f_obj.write(xml.tostring(vf1_novlan_obj))

        vf2_format_list = [
            '52:54:00:11:8f:eb', vf2_domain, vf2_bus, vf2_slot, vf2_func,
            '0x0000', '0x04', '0x0', '0x0'
        ]
        vf2_novlan_item = item.format(*vf2_format_list)
        vf2_novlan_obj = xml.fromstring(vf2_novlan_item)
        vf2_f_obj.write(xml.tostring(vf2_novlan_obj))

    cmd = f"""
    sleep 10
    echo "#################################################"
    cat {vf1_xml_path}
    echo "#################################################"
    cat {vf2_xml_path}
    echo "#################################################"
    virsh attach-device {vm} {vf1_xml_path}
    sleep 5
    virsh dumpxml {vm}
    sleep 10
    virsh attach-device {vm} {vf2_xml_path}
    sleep 5
    virsh dumpxml {vm}
    """
    log_and_run(cmd)
    return 0
def test_path_dir(self):
    assert local.path(__file__).dirname == SDIR
#!/usr/bin/env python
"""Snakescript for removing misjoins in the initial megadaph assemblies."""
from plumbum import local
from snakemake import shell

try:
    threads = str(snakemake.threads)
except NameError:
    # keep it a string so the ' '.join calls below work
    threads = "1"

inp = local.path(snakemake.input[0])
hm2_dir = inp.dirname
sample_id = snakemake.wildcards.sample + "_A"

with local.cwd(hm2_dir):
    shell(' '.join(["./hm.batchB1.initiation_and_all_lastz",
                    sample_id, threads]))
    shell(' '.join(["./hm.batchB2.chainNet_and_netToMaf",
                    sample_id, threads]))
    shell(' '.join(["./hm.batchB3.haplomerger",
                    sample_id, threads]))
    shell(' '.join(["./hm.batchB4.refine_unpaired_sequences",
                    sample_id, threads]))
    shell(' '.join(["./hm.batchB5.merge_paired_and_unpaired_sequences",
                    sample_id, threads]))
class TestLocalPath:
    longpath = local.path("/some/long/path/to/file.txt")

    def test_name(self):
        name = self.longpath.name
        assert isinstance(name, six.string_types)
        assert "file.txt" == str(name)

    def test_dirname(self):
        name = self.longpath.dirname
        assert isinstance(name, LocalPath)
        assert "/some/long/path/to" == str(name).replace("\\", "/").lstrip("C:")

    def test_uri(self):
        if IS_WIN32:
            assert "file:///C:/some/long/path/to/file.txt" == self.longpath.as_uri()
        else:
            assert "file:///some/long/path/to/file.txt" == self.longpath.as_uri()

    def test_pickle(self):
        path1 = local.path('.')
        path2 = local.path('~')
        assert pickle.loads(pickle.dumps(self.longpath)) == self.longpath
        assert pickle.loads(pickle.dumps(path1)) == path1
        assert pickle.loads(pickle.dumps(path2)) == path2

    def test_empty(self):
        with pytest.raises(TypeError):
            LocalPath()
        assert local.path() == local.path('.')

    @skip_without_chown
    def test_chown(self):
        with local.tempdir() as dir:
            p = dir / "foo.txt"
            p.write(six.b("hello"))
            assert p.uid == os.getuid()
            assert p.gid == os.getgid()
            p.chown(p.uid.name)
            assert p.uid == os.getuid()

    def test_split(self):
        p = local.path("/var/log/messages")
        # the original was missing the assert, so the comparison was a no-op
        assert p.split() == ["var", "log", "messages"]

    def test_suffix(self):
        p1 = self.longpath
        p2 = local.path("file.tar.gz")
        assert p1.suffix == ".txt"
        assert p1.suffixes == [".txt"]
        assert p2.suffix == ".gz"
        assert p2.suffixes == [".tar", ".gz"]
        assert p1.with_suffix(".tar.gz") == local.path("/some/long/path/to/file.tar.gz")
        assert p2.with_suffix(".other") == local.path("file.tar.other")
        assert p2.with_suffix(".other", 2) == local.path("file.other")
        assert p2.with_suffix(".other", 0) == local.path("file.tar.gz.other")
        assert p2.with_suffix(".other", None) == local.path("file.other")
        with pytest.raises(ValueError):
            p1.with_suffix('nodot')

    def test_newname(self):
        p1 = self.longpath
        p2 = local.path("file.tar.gz")
        assert p1.with_name("something.tar") == local.path("/some/long/path/to/something.tar")
        assert p2.with_name("something.tar") == local.path("something.tar")

    def test_relative_to(self):
        p = local.path("/var/log/messages")
        assert p.relative_to("/var/log/messages") == RelativePath([])
        assert p.relative_to("/var/") == RelativePath(["log", "messages"])
        assert p.relative_to("/") == RelativePath(["var", "log", "messages"])
        assert p.relative_to("/var/tmp") == RelativePath(["..", "log", "messages"])
        assert p.relative_to("/opt") == RelativePath(["..", "var", "log", "messages"])
        assert p.relative_to("/opt/lib") == RelativePath(["..", "..", "var", "log", "messages"])
        for src in [local.path("/var/log/messages"), local.path("/var"),
                    local.path("/opt/lib")]:
            delta = p.relative_to(src)
            assert src + delta == p

    def test_read_write(self):
        with local.tempdir() as dir:
            f = dir / "test.txt"
            text = six.b('hello world\xd7\xa9\xd7\x9c\xd7\x95\xd7\x9d').decode("utf8")
            f.write(text, "utf8")
            text2 = f.read("utf8")
            assert text == text2

    def test_parts(self):
        parts = self.longpath.parts
        if IS_WIN32:
            assert parts == ('C:\\', 'some', 'long', 'path', 'to', 'file.txt')
        else:
            assert parts == ('/', 'some', 'long', 'path', 'to', 'file.txt')

    @pytest.mark.usefixtures("testdir")
    def test_iterdir(self):
        cwd = local.path('.')
        files = list(cwd.iterdir())
        assert cwd / 'test_local.py' in files
        assert cwd / 'test_remote.py' in files

    def test_stem(self):
        assert self.longpath.stem == "file"
        p = local.path("/some/directory")
        assert p.stem == "directory"

    def test_root_drive(self):
        pathlib = pytest.importorskip("pathlib")
        pl_path = pathlib.Path("/some/long/path/to/file.txt").absolute()
        assert self.longpath.root == pl_path.root
        assert self.longpath.drive == pl_path.drive

        p_path = local.cwd / "somefile.txt"
        pl_path = pathlib.Path("somefile.txt").absolute()
        assert p_path.root == pl_path.root
        assert p_path.drive == pl_path.drive

    def test_compare_pathlib(self):
        pathlib = pytest.importorskip("pathlib")

        def filename_compare(name):
            p = local.path(str(name))
            pl = pathlib.Path(str(name)).absolute()
            assert str(p) == str(pl)
            assert p.parts == pl.parts
            assert p.exists() == pl.exists()
            assert p.is_symlink() == pl.is_symlink()
            assert p.as_uri() == pl.as_uri()
            assert str(p.with_suffix('.this')) == str(pl.with_suffix('.this'))
            assert p.name == pl.name

        filename_compare("/some/long/path/to/file.txt")
        filename_compare(local.cwd / "somefile.txt")
        filename_compare("/some/long/path/")
        filename_compare("/some/long/path")
        filename_compare(__file__)

    def test_suffix_expected(self):
        assert self.longpath.preferred_suffix('.tar') == self.longpath
        assert (local.cwd / 'this').preferred_suffix('.txt') == local.cwd / 'this.txt'

    def test_touch(self):
        with local.tempdir() as tmp:
            one = tmp / 'one'
            assert not one.is_file()
            one.touch()
            assert one.is_file()
            one.delete()
            assert not one.is_file()

    def test_copy_override(self):
        """Edit this when override behavior is added"""
        with local.tempdir() as tmp:
            one = tmp / 'one'
            one.touch()
            two = tmp / 'two'
            assert one.is_file()
            assert not two.is_file()
            one.copy(two)
            assert one.is_file()
            assert two.is_file()

    def test_copy_nonexistant_dir(self):
        with local.tempdir() as tmp:
            one = tmp / 'one'
            one.write(b'lala')
            two = tmp / 'two' / 'one'
            three = tmp / 'three' / 'two' / 'one'
            one.copy(two)
            assert one.read() == two.read()
            one.copy(three)
            assert one.read() == three.read()

    def test_unlink(self):
        with local.tempdir() as tmp:
            one = tmp / 'one'
            one.touch()
            assert one.exists()
            one.unlink()
            assert not one.exists()

    def test_unhashable(self):
        with pytest.raises(TypeError):
            hash(local.cwd)

    def test_getpath(self):
        assert local.cwd.getpath() == local.path('.')

    def test_path_dir(self):
        assert local.path(__file__).dirname == SDIR
def get_default_volume(self):
    path = local.path('~/.voodoo/shared')._path
    return [':'.join([path, path])]
def test_pickle(self):
    path1 = local.path('.')
    path2 = local.path('~')
    assert pickle.loads(pickle.dumps(self.longpath)) == self.longpath
    assert pickle.loads(pickle.dumps(path1)) == path1
    assert pickle.loads(pickle.dumps(path2)) == path2
def test_empty(self):
    with pytest.raises(TypeError):
        LocalPath()
    assert local.path() == local.path('.')
def test_split(self):
    p = local.path("/var/log/messages")
    # the original was missing the assert, so the comparison was a no-op
    assert p.split() == ["var", "log", "messages"]
def __mount_source(src_dir):
    src_dir = local.path(str(src_dir))
    mounts = CFG["container"]["mounts"].value
    mount = {"src": src_dir, "tgt": "/mnt/benchbuild"}
    mounts.append(mount)
    CFG["container"]["mounts"] = mounts
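# Usage sketch: register a host source directory as a container mount
# before the build runs (CFG is benchbuild's global settings object; the
# path below is illustrative):
#
#     __mount_source("/home/user/src/project")
#     # CFG["container"]["mounts"] now contains
#     # {"src": ".../project", "tgt": "/mnt/benchbuild"}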
class DownloadAudioBible(cli.Application):
    session = HTMLSession()
    chapter_info_file = local.path('chapters_info.json')

    def main(self):
        if not self.chapter_info_file.exists():
            self.get_chapter_info()
        self.download_mp3_for_all_chapter()

    def download_mp3_for_all_chapter(self):
        chapter_infos = self.chapter_info_file.read()
        chapter_infos = json.loads(chapter_infos)
        for chap in chapter_infos:
            self.download_chapter_audio(chap)

    def download_chapter_audio(self, chapter):
        local.path(chapter['name']).mkdir()
        file_dir = local.path(chapter['name'])
        for i in range(1, chapter['subs']):
            link = 'https://www.bible.com/mn/bible/1590/{}.{}.АБ2013'.format(
                chapter['pre'], i)
            req = self.session.get(link)
            mp3_link = req.html.find('source')[1].attrs['src']
            mp3_link = 'http://' + mp3_link.split('?version_id=')[0][2:]
            title = req.html.find('title', first=True).text
            file_name = file_dir / title + '.mp3'
            if not local.path(file_name).exists():
                mp3file = urllib.request.urlopen(mp3_link)
                with open(file_name, 'wb') as output:
                    output.write(mp3file.read())
                print("Downloaded {}".format(file_name))

    def get_all_chapters(self):
        link = 'https://www.bible.com/bible/1/GEN.1.KJV'
        req = self.session.get(link)
        ul = req.html.find('ul#list ma0 pa0 bg-white pb5 min-vh-100')
        print(ul.html)

    def get_chapter_info(self):
        """Use the selenium package because the site loads chapter data
        through ajax requests that are complicated to reproduce directly."""
        from selenium import webdriver
        from webdriver_manager.chrome import ChromeDriverManager
        from time import sleep

        driver = webdriver.Chrome(ChromeDriverManager().install())
        driver.get("https://www.bible.com")
        driver.find_element_by_xpath("//a[contains(text(),'Read')]").click()
        driver.implicitly_wait(2)
        driver.find_element_by_xpath(
            "//*[@role='button' and @class='w-100 flex items-center bb b--black-20 mr3 outline-0' "
            "and span[contains(text(),'Genesis 1')]]").click()
        driver.implicitly_wait(2)
        chapter_list = driver.find_element_by_xpath(
            "//ul[@class='list ma0 pa0 bg-white pb5 min-vh-100']"
        ).find_elements_by_tag_name("li")

        chapters = []
        for li in chapter_list:
            pre = li.get_attribute("option")
            name = li.text
            chapters.append({'pre': pre, 'name': name, 'subs': 0})

        for chap in chapters:
            driver.find_element_by_xpath(
                "//li[@data-vars-event-label='{}']".format(
                    chap['pre'])).click()
            driver.implicitly_wait(2)
            sub_chapters = driver.find_element_by_xpath(
                "//div[@class='ma0 pa0 pb5 flex flex-wrap']"
            ).find_elements_by_tag_name("div")
            num_sub_chapters = len(sub_chapters)
            chap['subs'] = num_sub_chapters
            print(chap)
            # Cancel from popup
            driver.find_element_by_xpath(
                "//button[@data-vars-event-action='Cancel' and @data-vars-event-category='Chapter Picker' "
                "and @class='ma0 pa0 truncate f7 f6-m black-30 bn bg-transparent outline-0']"
            ).click()
            driver.implicitly_wait(2)
            # Open chapters list
            driver.find_element_by_xpath(
                "//*[@role='button' and @class='w-100 flex items-center bb b--black-20 mr3 outline-0' "
                "and span[contains(text(),'Genesis 1')]]").click()
            sleep(2)

        driver.close()
        self.chapter_info_file.write(json.dumps(chapters))
        print("Chapter information saved to file {}".format(
            self.chapter_info_file))
print(Colors.OKBLUE + "Transpiling rest of files..." + Colors.NO_COLOR) transpile(COMPILE_COMMANDS, emit_build_files=True, cross_checks=args.cross_checks, cross_check_config=[CROSS_CHECK_CONFIG_YAML]) # Create rust/examples directory if it doesn't exist mkdir_args = ["-p", RUST_EXAMPLES_DIR] retcode, stdout, stderr = mkdir[mkdir_args].run() assert retcode != 1, "Could not make directory:\n{}".format(stderr) # Move test files to examples directory (since they have their own main function) mv_args = [os.path.join(LIBXML2_REPO, "%s.rs" % test) for test in TESTS] mv_args.append(RUST_EXAMPLES_DIR) retcode, stdout, stderr = mv[mv_args].run() assert retcode != 1, "Could not move translated rs files:\n{}".format( stderr) # Move source files to src directory plumbum_rs_glob = local.path(LIBXML2_REPO) // "*.rs" mv_args = [plumbum_rs_glob, RUST_SRC_DIR] retcode, stdout, stderr = mv[mv_args].run() assert retcode != 1, "Could not move translated rs files:\n{}".format( stderr) print(Colors.OKGREEN + "Done!" + Colors.NO_COLOR)
from pnlpipe_lib import *
import pnlpipe_lib.dag as dag
from pnlpipe_software import BRAINSTools, trainingDataT1AHCC, FreeSurfer
import hashlib
from plumbum import local, FG
from pnlscripts import TemporaryDirectory, dwiconvert_py, alignAndCenter_py, \
    atlas_py, eddy_py, bet_py, wmql_py
import pnlpipe_config
import logging
from python_log_indenter import IndentedLoggerAdapter

logger = logging.getLogger(__name__)
log = IndentedLoggerAdapter(logger, indent_char='.')

OUTDIR = local.path(pnlpipe_config.OUTDIR)


def find_caseid(root):
    return find_tag(root, 'caseid')


def _lookupInputKey(key, caseid):
    try:
        pathFormat = pnlpipe_config.INPUT_KEYS[key]
        caseid_placeholder = pnlpipe_config.INPUT_KEYS['caseid_placeholder']
        filepath = local.path(pathFormat.replace(caseid_placeholder, caseid))
        return filepath
    except KeyError as e:
        msg = """Key '{}' not found in pnlpipe_config.py:INPUT_KEYS.
It might be misspelled, or you might need to add it if it's missing.
""".format(e.args[0])
        raise Exception(msg)