def fixClock(clockName, vtrDir, vprVersion):
    """Rewrite the ABC output netlist so its latches reference the design clock.

    Only needed for VPR 8; produces clock_fixed.blif in the current build dir.
    """
    # For earlier versions a generic clock fix was done in the VPR scripts.
    if vprVersion != 8:
        return
    # Get the cwd. Note that we should be in the build dir.
    cwd = local.cwd
    # The blif file whose latches we want to fix.
    blif_file = cwd / 'abc_out.blif'
    # Fixed clock file.
    fixed_blif = cwd / 'clock_fixed.blif'
    fixed_blif_temp = cwd / 'clock_fixed_temp.blif'
    # If we don't have a clock to fix, just copy the abc file.
    if clockName is None:
        from plumbum.cmd import cp
        cp(str(blif_file), str(fixed_blif))
        return
    # Get the model name of the blif file; needed for the fix.
    modelName = BlifParser.extractModelName(str(blif_file))
    # Run the fix: the first run adds the "re" attribute, the second removes
    # some empty models added by the first pass.
    fixCommand = "latch_^_re_^_" + modelName + "^" + clockName + "_^_0"
    fixLatchesPath = vtrDir / "vtr_flow/scripts/blackbox_latches.pl"
    fixLatches = local[fixLatchesPath]
    print fixLatches("--input", str(blif_file), "--output", str(fixed_blif_temp), "--restore", fixCommand)
    print fixLatches("--input", str(fixed_blif_temp), "--output", str(fixed_blif), "--vanilla")
def main(self):
    """Generate gomock mocks for the target package and copy them into the tree.

    Writes a BUILD.bazel with a gomock rule into mock_<name>/, builds it with
    bazel, copies the generated file back into the workspace, and re-runs
    gazelle to refresh BUILD files.
    """
    # Normalise the package path (strip trailing slash) and derive names.
    self.package = self.package.rstrip("/")
    package_path = plumbum.local.path(self.package)
    name = package_path.name
    mock_path = plumbum.local.path(package_path / "mock_%s" % name)
    # Remove previously generated mocks before regenerating.
    delete(mock_path // "*.go")
    # NOTE: %s of the split() list relies on Python list repr being valid
    # Starlark (single-quoted strings are accepted by Starlark).
    buildscript = """
load("@com_github_jmhodges_bazel_gomock//:gomock.bzl", "gomock")

gomock(
    name = "go_default_mock",
    out = "mock.go",
    interfaces = %s,
    library = "//%s:go_default_library",
    package = "mock_%s",
)
""" % (self.interfaces.split(","), self.package, name)
    pathlib.Path(mock_path).mkdir(parents=True, exist_ok=True)
    pathlib.Path(mock_path / "BUILD.bazel").write_text(buildscript)
    mock_rule = "//%s:go_default_mock" % os.path.join(
        self.package, "mock_%s" % name)
    bazel = plumbum.local['bazel']
    bazel("build", mock_rule)
    # Copy the bazel-built mock file back into the source tree, writable.
    bf, wf = rule_to_file(mock_rule)
    cmd.cp(bf, wf)
    cmd.chmod("0644", wf)
    cmd.make("gazelle")
def configure(self):
    """Write the Gentoo portage configuration into the build directory.

    Creates etc/portage/make.conf and etc/portage/metadata/layout.conf, and
    copies the host resolv.conf so DNS works inside the chroot.
    """
    with local.cwd(self.builddir):
        with open("etc/portage/make.conf", 'w') as makeconf:
            # NOTE(review): the "PORTAGE_* = value" lines use spaces around
            # '='; make.conf is bash-like and normally requires VAR="value"
            # with no spaces — confirm portage accepts this form.
            lines = '''
PATH="/llvm/bin:/pprof/bin:${PATH}"
LD_LIBRARY_PATH="/llvm/lib:/pprof/lib:${LD_LIBRARY_PATH}"
CFLAGS="-O2 -pipe"
CXXFLAGS="${CFLAGS}"
FEATURES="-sandbox -usersandbox -usersync -xattr"
CC="/llvm/bin/clang"
CXX="/llvm/bin/clang++"
PORTAGE_USERNAME = "******"
PORTAGE_GRPNAME = "root"
PORTAGE_INST_GID = 0
PORTAGE_INST_UID = 0
CHOST="x86_64-pc-linux-gnu"
USE="bindist mmx sse sse2"
PORTDIR="/usr/portage"
DISTDIR="${PORTDIR}/distfiles"
PKGDIR="${PORTDIR}/packages"
'''
            makeconf.write(lines)
        mkdir("-p", "etc/portage/metadata")
        with open("etc/portage/metadata/layout.conf", 'w') as layoutconf:
            lines = '''masters = gentoo'''
            layoutconf.write(lines)
        # DNS resolution inside the chroot needs the host's resolv.conf.
        cp("/etc/resolv.conf", "etc/resolv.conf")
def prepare(self):
    """Stage povray's config, scene, and test data trees into the build dir."""
    super(Povray, self).prepare()
    # Each required asset tree is copied recursively, preserving attributes.
    for subdir in ("cfg", "etc", "scenes", "share", "test"):
        cp("-ar", path.join(self.testdir, subdir), self.builddir)
def load_source(url_or_path: str) -> plumbum.Path:
    """Fetch project sources from a git URL, an HTTP(S) archive, or a local path.

    Supported inputs:
      * ``git+<scheme>://...[#branch]`` — shallow git clone (fragment = branch)
      * http/https URLs — streamed through wget into ``tar xvz``
      * a local directory — copied recursively
      * a local file — treated as a tarball and extracted

    Returns the single new path created in the current working directory;
    raises if loading produced zero or more than one new top-level entry.
    """
    parsed_url = urlparse(url_or_path)
    with track_new_files(local.cwd) as new_files:
        # Load data
        if parsed_url.scheme.startswith("git+"):
            # Strip the "git+" prefix to recover the real transport scheme.
            original_scheme = parsed_url.scheme[len("git+"):]
            repo_url = urlunparse((
                original_scheme,
                parsed_url.netloc,
                parsed_url.path,
                parsed_url.params,
                parsed_url.query,
                "",
            ))
            args = ["clone", repo_url, "--depth=1"]
            # A URL fragment selects the branch/tag to clone.
            if parsed_url.fragment:
                args.append(f"--branch={parsed_url.fragment}")
            cmd.git(*args)
        elif parsed_url.scheme in HTTP_SCHEMES:
            # Stream the download straight into tar (assumes a gzipped tarball).
            (cmd.wget["-qO-", url_or_path] | cmd.tar["xvz"])()
        else:
            url_path = local.cwd / url_or_path
            if url_path.is_dir():
                cmd.cp("-r", url_path, local.cwd)
            else:
                cmd.tar("-xvf", url_or_path)
    # Exactly one new top-level entry must have appeared.
    project_path, = new_files
    return project_path
def backup_vscode(self):
    """Copy each configured VS Code file into the backup dir, reporting progress."""
    for name in vs_code_files:
        source = os.path.join(vs_code_config_path, name)
        # Skip (and flag) entries that are absent from the config directory.
        if not os.path.exists(source):
            print(colors.red | ('file not found: %s' % source))
            continue
        cp("-r", source, vs_code_backup_path)
        print(colors.green | ('cp %s' % source))
    print("vscode config backup completed")
def download(self):
    """Fetch the linpack C source and apply the local patch to it."""
    from pprof.utils.downloader import Wget
    from plumbum.cmd import patch, cp

    patch_file = path.join(self.sourcedir, "linpack.patch")
    with local.cwd(self.builddir):
        Wget(self.src_uri, "linpackc.new")
        cp("-a", "linpackc.new", "linpack.c")
        # Feed the patch on stdin; -p0 keeps the paths as written in the diff.
        (patch["-p0"] < patch_file)()
def Copy(From, To):
    """Recursively copy a path, preserving attributes and reflinking when possible.

    Args:
        From (str): Path to the SOURCE.
        To (str): Path to the TARGET.
    """
    from plumbum.cmd import cp

    # -a preserves mode/ownership/timestamps; --reflink=auto lets CoW
    # filesystems (btrfs, xfs) share blocks instead of duplicating data.
    cp("-ar", "--reflink=auto", From, To)
def update_files(self):
    """Build all mock rules with bazel and copy each result into the workspace."""
    rules = mock_rules()
    bazel = plumbum.local['bazel']
    print("building mock files...")
    bazel("build", rules)
    for rule in rules:
        print(rule)
        built, workspace = rule_to_file(rule)
        # Bazel outputs are read-only; make the workspace copy writable.
        cmd.cp(built, workspace)
        cmd.chmod("0644", workspace)
def download(self):
    """Download and unpack the p7zip sources; select the clang/amd64 makefile."""
    from pprof.utils.downloader import Wget
    from plumbum.cmd import tar, cp

    p7z_dir = path.join(self.builddir, self.src_dir)
    with local.cwd(self.builddir):
        Wget(self.src_uri, self.src_file)
        tar('xfj', path.join(self.builddir, self.src_file))
        # p7zip selects its toolchain via "makefile.machine"; install the
        # clang/amd64/asm variant as the active configuration.
        cp(
            path.join(p7z_dir, "makefile.linux_clang_amd64_asm"),
            path.join(p7z_dir, "makefile.machine"))
def download(self):
    """Fetch and unpack the stage3 and portage tarballs into the build dir."""
    from pprof.settings import config

    with local.cwd(self.builddir):
        Wget(self.src_uri, self.src_file)
        # The uchroot helper is needed inside the container image later on.
        cp(config["sourcedir"] + "/bin/uchroot", "uchroot")
        # fakeroot lets extraction create device nodes / ownership without root.
        run(fakeroot["tar", "xfj", self.src_file])
        rm(self.src_file)
    with local.cwd(self.builddir + "/usr"):
        # The portage tree goes under <builddir>/usr.
        Wget(self.src_uri_portage, self.src_file_portage)
        run(tar["xfj", self.src_file_portage])
        rm(self.src_file_portage)
def recover_vscode(self):
    """Restore VS Code configuration files from the backup directory."""
    for name in vs_code_files:
        target = os.path.join(vs_code_config_path, name)
        backup = os.path.join(vs_code_backup_path, name)
        # Flag and skip anything missing from the backup.
        if not os.path.exists(backup):
            print(colors.red | ('file not found: %s' % backup))
            continue
        # Directories are copied into the config dir as a whole; plain files
        # overwrite the target in place.
        if os.path.isdir(target):
            cp("-rf", backup, vs_code_config_path)
        else:
            cp("-f", backup, target)
        print(colors.green | ('cp %s covers %s' % (backup, target)))
    print("vscode config recover completed")
def unified_auto_man_to_evals(inf, ingoldf, keyin, goldkeyin, dirout, rm_blacklist):
    """
    Converts a unified corpus and manually annotated data into a full
    train/dev/test split for use by finn-wsd-eval.
    """
    # Create one partition directory per segment and remember its paths.
    ps = {}
    for seg in ["train", "devtest", "dev", "test"]:
        segoutdir = pjoin(dirout, seg)
        os.makedirs(segoutdir, exist_ok=True)
        ps[seg] = get_partition_paths(segoutdir, "corpus")
    # Split the automatic corpus: 1000 sentences -> devtest, rest -> train.
    python(
        filter_py,
        "split",
        "--sentences",
        "1000",
        inf,
        ps["devtest"]["unified"],
        ps["train"]["unified"],
        keyin,
        ps["devtest"]["unikey"],
        ps["train"]["unikey"],
    )
    # Carve the dev portion out of devtest using the gold (manual) annotations.
    python(
        filter_py,
        "unified-test-dev-split",
        ps["devtest"]["unified"],
        ingoldf,
        ps["devtest"]["unikey"],
        goldkeyin,
        ps["dev"]["unified"],
        ps["dev"]["unikey"],
    )
    # The manually annotated data becomes the test segment verbatim.
    cp(ingoldf, ps["test"]["unified"])
    cp(goldkeyin, ps["test"]["unikey"])
    # Optionally blacklist high-frequency lemmas ("olla", "ei") from evaluation.
    if rm_blacklist:
        exclude = ["olla", "ei"]
    else:
        exclude = []
    for seg in ["train", "dev", "test"]:
        segoutdir = pjoin(dirout, seg)
        unified_to_single_eval.callback(
            "corpus",
            pjoin(segoutdir, "corpus.xml"),
            pjoin(segoutdir, "corpus.key"),
            segoutdir,
            exclude,
        )
def do_it(fn):
    """Ensure *fn* (a .tar.gz) contains the single-page coverage report.

    If the report is present, print "ok"; otherwise extract the raw coverage
    XML, regenerate the report via cmd(), and splice it back into the archive.
    """
    with tarfile.open(fn) as tar:
        try:
            tar.getmember('coverage/report_html/report_single.html')
            print "ok"
        except KeyError:
            tar.extract('coverage/codecover.xml', path='.')
            print "Got it from ", fn
            # cmd() presumably regenerates report_single.html from the
            # extracted XML — confirm against the surrounding module.
            print cmd()
            # Work on a copy: gunzip, append the report, re-gzip, copy back.
            cp(fn, 'mytar.tar.gz')
            local['gunzip']('mytar.tar.gz')
            # NOTE(review): this rebinds `tar` while the outer archive handle
            # is still open; works, but shadowing invites mistakes.
            with tarfile.open('mytar.tar', 'a') as tar:
                tar.add('report_single.html', arcname='coverage/report_html/report_single.html')
            local['gzip']('mytar.tar')
            cp('mytar.tar.gz', fn)
def configure(self):
    """Write the container's portage/wget configuration and wrap the compilers."""
    with local.cwd(self.builddir):
        self.write_bashrc("etc/portage/bashrc")
        self.write_makeconfig("etc/portage/make.conf")
        self.write_wgetrc("etc/wgetrc")
        mkdir("-p", "etc/portage/metadata")
        self.write_layout("etc/portage/metadata/layout.conf")
        # DNS resolution inside the chroot needs the host's resolv.conf.
        cp("/etc/resolv.conf", "etc/resolv.conf")
        # Make the benchmark config available inside the image, if it exists.
        config_file = CFG["config_file"].value()
        if path.exists(str(config_file)):
            cp(config_file, path.basename(config_file))
        # Wrap cc/cxx so builds inside the uchroot use the instrumented
        # toolchain with our flags and compiler extension.
        wrap_cc_in_uchroot(self.cflags, self.ldflags, self.compiler_extension,
                           "/llvm/bin")
        wrap_cxx_in_uchroot(self.cflags, self.ldflags, self.compiler_extension,
                            "/llvm/bin")
def make(commit=DEFAULT_HASH):
    """Build ANTs at *commit* from source and install its binaries.

    Skips the build if a binary directory for the commit already exists.
    Installs to get_path(<sha>), writes env.sh (PATH/ANTSPATH), and points a
    date-named symlink at the sha-named directory.
    """
    softdir = getSoftDir()
    blddir = softdir / "ANTs-build"
    with local.cwd(softdir):
        repo = downloadGithubRepo('ANTsX/ANTs', commit)
        sha, date = getCommitInfo(repo)
        out = get_path(sha)
        # if output binary directory already exists, then return
        if checkExists(out):
            return
        logging.info("Build code:")
        blddir.mkdir()
        with local.cwd(blddir):
            cmake(repo)
            import plumbum.cmd
            # Parallel build across physical cores; stream output to the console.
            plumbum.cmd.make['-j', psutil.cpu_count(logical=False)] & FG
        # copy ANTs scripts
        # NOTE(review): "ANTS-build" (capital S) is the CMake superbuild's
        # inner directory, distinct from our "ANTs-build" — confirm.
        cp('-a', (softdir / 'ANTs' / 'Scripts').list(),
           blddir / 'ANTS-build' / 'Examples')
        # move binary directory
        (blddir / 'ANTS-build' / 'Examples').move(out)
        # write PATH and ANTSPATH
        with open(out / 'env.sh', 'w') as f:
            f.write("export PATH={}:$PATH\n".format(out))
            f.write("export ANTSPATH={}\n".format(out))
        # generate symbolic links
        symlink = get_path(date)
        print("Make symlink: {} -> {}".format(symlink, get_path(sha)))
        symlink.unlink()
        get_path(sha).symlink(symlink)
    logging.info("Made '{}'".format(out))
def install_regolith():
    """Install the Regolith desktop, its i3xrocks blocks, and local block configs."""
    add_apt_repository("ppa:regolith-linux/release")
    blocks = (
        "regolith-desktop-standard",
        "regolith-look-dracula",
        "i3xrocks-battery",
        "i3xrocks-cpu-usage",
        "i3xrocks-media-player",
        "i3xrocks-memory",
        "i3xrocks-temp",
        "i3xrocks-time",
        "i3xrocks-volume",
        "i3xrocks-weather",
        "i3xrocks-wifi",
    )
    install_with_apt(*blocks)
    install_with_pip("i3ipc")
    # Copy blocks to the per-user install so they can be customised.
    destination = local.path("~/.config/regolith/i3xrocks/conf.d")
    for block_path in local.path("/etc/regolith/i3xrocks/conf.d").iterdir():
        cmd.cp(block_path, destination)
def unified_to_single_eval(seg, inf, keyin, dirout):
    """
    Converts a unified corpus into all the data needed for a single
    test/train segment by finn-wsd-eval.
    """
    pdict = get_partition_paths(dirout, seg)
    # Copy corpus + key into place unless they already are the target files.
    for src, dest in [(inf, pdict["unified"]), (keyin, pdict["unikey"])]:
        if samefile(src, dest):
            continue
        cp(src, dest)
    # Produce the supervised-format corpus and its key files.
    unified_to_sup.callback(
        pdict["unified"],
        pdict["unikey"],
        pdict["sup"],
        pdict["sup3key"],
        pdict["supkey"],
    )
    # Derive FinnPOS-tagged and Omorfi-segmented variants of the data.
    python(munge_py, "finnpos-senseval", pdict["sup"], pdict["suptag"])
    python(munge_py, "omorfi-segment-senseval", pdict["sup"], pdict["supseg"])
def checkOverlayEquivalence(zumaDir, yosysDir, vtrDir, vprVersion):
    """Run a yosys + ABC equivalence check between overlay and specification.

    Copies the verification inputs into the ZUMA test suite directory, runs
    the yosys scripts there, and returns the result of the ABC check.
    """
    abcPath = getVTRToolpath('abc', vtrDir, vprVersion)
    yosysPath = yosysDir / "yosys"
    from plumbum.cmd import cp
    # copy the verification files
    cp("abc_out_v.blif", str(zumaDir / "verilog/verification/VerificationTestsuite"))
    cp("verificationOverlay.v", str(zumaDir / "verilog/verification/VerificationTestsuite"))
    cp("top_module.v", str(zumaDir / "verilog/verification/VerificationTestsuite"))
    # back up the current cwd
    oldcwd = str(local.cwd)
    # change dir into the testsuite
    local.cwd.chdir(zumaDir / "verilog/verification/VerificationTestsuite")
    # run yosys
    yosys = local[yosysPath]
    print yosys("-s", "specification.ys")
    print yosys("-s", "removeports.ys")
    print yosys("-s", "removeports_abc.ys")
    # run the equivalence check
    result = runAbcEquivalenceCheck(abcPath, "abc_out_v_opt.blif", "test_opt.blif")
    # return to the old dir
    local.cwd.chdir(oldcwd)
    return result
def unified_auto_man_to_evals(inf, ingoldf, keyin, goldkeyin, dirout):
    """Convert a unified corpus plus manually annotated data into
    train/dev/test splits for finn-wsd-eval."""
    # Create one partition directory per segment and remember its paths.
    ps = {}
    for seg in ["train", "devtest", "dev", "test"]:
        segoutdir = pjoin(dirout, seg)
        os.makedirs(segoutdir, exist_ok=True)
        ps[seg] = get_partition_paths(segoutdir, "corpus")
    # Split the automatic corpus: 1000 sentences -> devtest, rest -> train.
    python(
        filter_py,
        "split",
        "--sentences",
        "1000",
        inf,
        ps["devtest"]["unified"],
        ps["train"]["unified"],
        keyin,
        ps["devtest"]["unikey"],
        ps["train"]["unikey"],
    )
    # Carve the dev portion out of devtest using the gold (manual) annotations.
    python(
        filter_py,
        "unified-test-dev-split",
        ps["devtest"]["unified"],
        ingoldf,
        ps["devtest"]["unikey"],
        goldkeyin,
        ps["dev"]["unified"],
        ps["dev"]["unikey"],
    )
    # The manually annotated data becomes the test segment verbatim.
    cp(ingoldf, ps["test"]["unified"])
    cp(goldkeyin, ps["test"]["unikey"])
    for seg in ["train", "dev", "test"]:
        segoutdir = pjoin(dirout, seg)
        unified_to_single_eval.callback(
            "corpus",
            pjoin(segoutdir, "corpus.xml"),
            pjoin(segoutdir, "corpus.key"),
            segoutdir,
        )
def unified_to_single_eval(seg, inf, keyin, dirout, exclude):
    """
    Converts a unified corpus into all the data needed for a single
    test/train segment by finn-wsd-eval.

    This creates a simple train/test split. For a full train/dev/test split
    including manually annotated data use unified-auto-man-to-evals.
    `exclude` lists lemmas to drop from the supervised data.
    """
    pdict = get_partition_paths(dirout, seg)
    # Copy corpus + key into place unless they already are the target files.
    for src, dest in [(inf, pdict["unified"]), (keyin, pdict["unikey"])]:
        if samefile(src, dest):
            continue
        cp(src, dest)
    # Produce the supervised-format corpus and key files, minus excluded lemmas.
    unified_to_sup.callback(
        pdict["unified"],
        pdict["unikey"],
        pdict["sup"],
        pdict["sup3key"],
        pdict["supkey"],
        exclude,
    )
    # Derive FinnPOS-tagged and Omorfi-segmented variants of the data.
    python(munge_py, "finnpos-senseval", pdict["sup"], pdict["suptag"])
    python(munge_py, "omorfi-segment-senseval", pdict["sup"], pdict["supseg"])
def main(self):
    """Generate a SCION test topology with topogen and pack it into a tarball.

    Works in a throwaway temp dir (always removed), rewrites absolute paths in
    the generated docker-compose file to $SCIONROOT, and writes self.outfile.
    """
    tmpdir = local.path(tempfile.mkdtemp(prefix='topogen.'))
    try:
        scion_pki = local.path(self.scion_pki_bin)
        topogen = local[self.topogen_bin]
        # topogen shells out to scion-pki; make it discoverable on PATH.
        local.env.path.insert(0, scion_pki.parent)
        # bazel only creates a symlink to the crypto_lib,
        # we copy it to tmp so that it works with docker.
        cp('-L', local.path('./scripts/cryptoplayground/crypto_lib.sh'), tmpdir)
        topogen_args = ['-o', tmpdir / 'gen', '-c', self.topo]
        if self.params != '':
            topogen_args += self.params.split()
        with local.env(CRYPTOLIB=tmpdir / 'crypto_lib.sh'):
            print('Running topogen with following arguments: ' +
                  ' '.join(topogen_args))
            print(topogen(*topogen_args))
        # Remove the explicit scion root dir, so that tests can adapt this to
        # wherever they unpack the tar.
        sed('-i', 's@%s@$SCIONROOT@g' % local.path('.'),
            tmpdir / 'gen' / 'scion-dc.yml')
        sed('-i', 's@%s@$SCIONROOT@g' % tmpdir,
            tmpdir / 'gen' / 'scion-dc.yml')
        # Pre-create the runtime support directories expected by the tests.
        for support_dir in ['logs', 'gen-cache', 'gen-data', 'traces']:
            os.mkdir(tmpdir / support_dir)
        tar('-C', tmpdir, '-cf', self.outfile, '.')
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)
def setup(self):
    """Install nginx, enable gzip for common text types, and add an
    HTTP->HTTPS redirect listener on port 81 (for use behind an ELB)."""
    install('nginx')
    # Uncomment the stock gzip_types line; two variants exist across nginx
    # package versions (application/javascript vs application/x-javascript).
    substitute(
        '# gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;',
        'gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;',
        '/etc/nginx/nginx.conf'
    )
    substitute(
        '# gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;',
        'gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;',
        '/etc/nginx/nginx.conf'
    )
    # Keep the Debian default page reachable as index.html if it exists.
    if os.path.isfile('/var/www/html/index.nginx-debian.html'):
        cp('/var/www/html/index.nginx-debian.html', '/var/www/html/index.html')
    # For SLL on ELB's http://stackoverflow.com/questions/24603620/redirecting-ec2-elb-from-http-to-https
    write(
        '''
server {
    listen 81;
    return 301 https://$host$request_uri;
}
''',
        '/etc/nginx/conf.d/000-listen-81.conf'
    )
source_path = os.path.dirname(os.path.abspath(script_path_inspect)) # The absolute path to the current working directory, where this script # is being launched. current_path = local.cwd # Take in the first argument or option which is the project directory. project_dir = sys.argv[1] # Define absolute path to project directory. project_path = current_path + "/" + project_dir with colors.orchid: print "========== Setting up your project directory ==========" mkdir(project_dir) with colors.orchid: print "========== Installing .gitignore ==========" cp(source_path + "/.gitignore", project_path) with colors.orchid: print "========== Installing Procfile and Runtime for Heroku ==========" cp(source_path + "/Procfile", project_path) cp(source_path + "/runtime.txt", project_path) with colors.orchid: print "========== Installing Vagrant environment ==========" cp(source_path + "/Vagrantfile", project_path) # Install provisioning script. cp(source_path + "/provision.sh", project_path) # Installing requirements.txt cp(source_path + "/requirements.txt", project_path) cp(source_path + "/.bashrc", project_path) cp(source_path + "/zeus.sh", project_path)
def appimage(project, logger):
    """PyBuilder task: bundle the built project into an AppImage AppDir.

    Creates <target>/appimage/<name><version>/in/<name>.AppDir, installs a
    virtualenv with the project's dependencies plus the project itself
    (pip install -e), writes a .desktop entry, then hands off to
    AppImageAssistant via generate_appimage().
    """
    from plumbum import cmd, local
    arch = project.get_property("appimage_arch")
    # TODO make the 32 or 64 be valid input options (easier)
    assert arch in ('i686', 'x86-64', 'armv6l')
    # NOTE(review): `name` and `version` are not parameters or locals here —
    # presumably module-level globals set elsewhere; confirm.
    name_lower = name.lower()
    p_output = project.expand_path("$dir_target")
    appimgdir = join(p_output, "appimage", name + version, 'in', name + '.AppDir')
    try:
        os.makedirs(appimgdir, exist_ok=False)
    except FileExistsError:
        print()
        print('Appimage dir already exists for version {}, overwrite (y/N)'.
              format(version))
        force = project.get_property('appimage_force')
        if force in (True, "1", "True", "true"):
            logger.warn("appimage: overwriting as appimage_force is True")
            # Delete the entire "in" folder
            shutil.rmtree(os.path.dirname(appimgdir))
            os.makedirs(appimgdir, exist_ok=False)
        # Prompt to overwrite or not
        else:
            ans = input()
            if ans in ('y', 'Y'):
                # Delete the entire "in" folder
                shutil.rmtree(os.path.dirname(appimgdir))
                os.makedirs(appimgdir, exist_ok=False)
            elif ans in ('n', 'N', ''):
                logger.info('appimage: nothing done')
                raise Exception
            else:
                logger.warning('appimage: Expected either y or n, recieved: ' + ans)
                raise Exception
    os.chdir(appimgdir)
    shutil.rmtree('usr', ignore_errors=True)
    logger.info('appimage: creating virtualenv in AppDir')
    cmd.virtualenv('usr')
    logger.info('appimage: Installing python libraries')
    pip = local['usr/bin/pip']
    for dep in project.dependencies:
        logger.info('appimage: installing dep: ' + str(dep))
        # TODO look how pybuilder installs deps...
        pip['install', dep]()
    logger.info('appimage: Copy our pybuilder bundled app into the appdir')
    src = project.expand_path('$dir_dist')
    shutil.copytree(src, join(appimgdir, name_lower))
    # TODO can we reorganize our projecet so that this isn't necessary?
    # Installs our own projet using pip -install -e
    # TODO make a flag for this option, also a flag to use a virtualenv from a path
    src_setup = join(
        project.basedir,
        os.path.dirname(project.get_property("dir_source_main_python")),
        'setup.py')
    dst_setup = join(appimgdir, 'setup.py')
    shutil.copy(src_setup, dst_setup)
    pip['install', '-e', appimgdir]()
    # TODO auto insert bangline for correct python version
    logger.info('appimage: create symlink to script files')
    script_dir = join(name_lower, 'scripts')
    for script in os.listdir(script_dir):
        file = join(script_dir, script)
        pointing_to = join(os.pardir, os.pardir, file)
        if os.path.isfile(file):
            os.symlink(pointing_to, 'usr/bin/' + script)
            os.chmod(file, A_X)
    with open(join(appimgdir, name_lower + '.desktop'), 'w') as f:
        data = ('[Desktop Entry]',
                'Name=' + name,
                'Exec=' + project.get_property('main_entry_point'),
                'Icon=' + name_lower)
        f.write('\n'.join(data))
    logger.info('appimage: moving in files')
    cmd.cp(project.get_property('appimage_icon'), name_lower + '.png')
    logger.info('appimage: getting apprun')
    get_apprun()
    os.chdir(os.pardir)
    logger.info('appimage: getting/running AppImageAssistant')
    generate_appimage(name, version, arch)
def setup(self):
    """Install tmpreaper non-interactively and deploy our config for it."""
    # Keep /tmp contents for 14 days across reboots.
    write_line("TMPTIME=14", '/etc/default/rcS', 'TMPTIME=', mode='644')
    # Suppress the debconf warning prompt during package installation.
    with local.env(DEBIAN_FRONTEND='noninteractive'):
        install('tmpreaper')
    cp(files_folder + '/tmpreaper.conf', '/etc/tmpreaper.conf')
def copy_file(src, dst):
    """Copy *src* to *dst*, creating the destination directory first.

    -L dereferences symlinks so the copy is a regular file.
    """
    destination_dir = os.path.dirname(dst)
    cmd.mkdir("-p", destination_dir)
    cmd.cp("-L", src, dst)
def prepare(self):
    """Stage the gzip test input files into the build directory."""
    super(Gzip, self).prepare()
    inputs = [path.join(self.testdir, name) for name in self.testfiles]
    # plumbum flattens the list into a single cp invocation.
    cp(inputs, self.builddir)
def prepare(self):
    """Copy each x264 input video into the build directory."""
    super(X264, self).prepare()
    for name in self.inputfiles:
        cp(path.join(self.testdir, name), self.builddir)
def prepare(self):
    """Mirror the LAMMPS test directory into the build dir as "test"."""
    super(Lammps, self).prepare()
    from plumbum.cmd import cp
    with local.cwd(self.builddir):
        # Recursive, verbose copy of the whole test tree.
        cp("-vr", self.testdir, "test")
def prep_for_mk_tar(dic):
    """Copy each source file in *dic* to its mapped destination name.

    Args:
        dic: mapping of source path -> destination path. Entries whose key
            equals the value are skipped (already in place).
    """
    # .items() works on both Python 2 and 3; .iteritems() was removed in
    # Python 3 and raised AttributeError there.
    for src, dst in dic.items():
        if src != dst:
            cp(src, dst)
def configure(self):
    """Stage this benchmark's sources and the shared utilities into the build dir."""
    from plumbum.cmd import cp

    with local.cwd(self.builddir):
        # Copy the benchmark's own source tree to "<name>.dir".
        cp("-ar", path.join(self.src_dir, self.path_dict[self.name],
                            self.name), self.name + ".dir")
        # The shared utilities are required by every benchmark in the suite.
        cp("-ar", path.join(self.src_dir, "utilities"), ".")
def prepare(self):
    """Copy the Postgres test fixtures into the build dir, preserving attributes."""
    super(Postgres, self).prepare()
    for name in self.testfiles:
        cp("-a", path.join(self.testdir, name), self.builddir)
def prepare(self):
    """Stage the XZ test files into the build directory."""
    super(XZ, self).prepare()
    from plumbum.cmd import cp
    sources = [path.join(self.testdir, name) for name in self.testfiles]
    # One cp invocation; plumbum expands the list into separate arguments.
    cp(sources, self.builddir)