def start_mzbench_server():
    """Start the MZBench API server for tests (generator body, used as a
    contextmanager): write test_server.config, start the server, yield while
    tests run, dump the newest erlang log on failure, always stop the server.
    """
    # Node source: rsync path, git repo, or server default when neither is set.
    if 'MZBENCH_RSYNC' in os.environ:
        node_location_param = '{{node_rsync, "{0}"}},'.format(os.environ['MZBENCH_RSYNC'])
    elif 'MZBENCH_REPO' in os.environ:
        node_location_param = '{{node_git, "{0}"}},'.format(os.environ['MZBENCH_REPO'])
    else:
        node_location_param = ''
    with open(dirname + "/test_server.config", "w") as config:
        # Double braces produce literal Erlang tuples in the written config.
        config.write('[{{mzbench_api, [{0}{{node_log_port, 0}}, {{node_management_port, 0}}]}}].'.format(node_location_param))
    with open('{0}/test_server.config'.format(dirname), 'r') as f:
        print(f.read())
    cmd('{0} start_server --config {1}/test_server.config'.format(mzbench_script, dirname))
    try:
        time.sleep(3) # give server some time to start
        yield
    except:
        print ''
        print '-------------------- >> begin server logs << ---------------------'
        # NOTE(review): mzbench_dir is concatenated without a separator —
        # presumably it already ends with '/'; confirm at its definition.
        logdir = os.path.join(mzbench_dir + 'server/_build/default/rel/mzbench_api/log')
        logfiles = [logfile for logfile in os.listdir(logdir)]
        # Pick the most recently modified erlang* log.
        logfile = sorted([os.path.join(logdir, l) for l in logfiles if l.startswith('erlang')], key=os.path.getmtime, reverse=True)[0]
        with open(logfile) as f:
            for line in f:
                print line.rstrip().replace('\\n', '\n')
        print '-------------------- >> end server logs << ---------------------'
        print ''
        raise
    finally:
        cmd('{0} stop_server'.format(mzbench_script))
def target_merge_busybox(self):
    """Emerge sys-apps/busybox into the target root (chrooted when seeded),
    then sync /bin/busybox into the embedded root via path_sync_callback."""
    env = {}
    use = ''
    if 'USE' in self.env:
        use = self.env['USE']
    if len(self.package_list) == 0:
        # No other packages selected: build busybox statically so the
        # target filesystem is self-contained.
        use += ' static'
    use += ' make-symlinks'
    env.update(self.env)
    env['USE'] = use
    env['ROOT'] = self.target_root
    cmdline = '%s/inhibitor-run.sh run_emerge --newuse --nodeps sys-apps/busybox' \
        % self.env['INHIBITOR_SCRIPT_ROOT']
    if self.seed:
        # Seeded build: run emerge and the sync callback inside the chroot.
        util.chroot(
            path = self.target_root,
            function = util.cmd,
            fargs = {'cmdline':cmdline, 'env':env},
            failuref = self.chroot_failure
        )
        util.chroot(
            path = self.target_root,
            function = self.path_sync_callback,
            fargs = {'src':'/bin/busybox', '_':None},
            failuref = self.chroot_failure
        )
    else:
        util.cmd(cmdline, env = env)
        self.path_sync_callback('/bin/busybox', None)
def pull(args: Args) -> None:
    """Update ``args.repo`` with ``git pull``.

    Skipped entirely unless ``args.pull`` is set.  On pull failure, either
    hard-reset to ``args.remote_branch`` (with ``--reset``) or abort.
    """
    if args.pull:
        # cmd() appears to be a display/logging helper elsewhere in this
        # codebase (see diff_hw_config); the real work is subprocess.run below.
        cmd("git pull --no-edit")
        proc = subprocess.run(args.sudo_prefix + ["git", "pull", "--no-edit"],
                              check=False, cwd=args.repo)
        if proc.returncode != 0:
            # git pull failed, maybe reset?
            if args.reset:
                run_or_fatal(
                    args.sudo_prefix + ["git", "reset", "--hard", args.remote_branch],
                    log=True,
                    cwd=args.repo,
                )
            else:
                fatal(
                    "`git pull` failed. Pass `--reset` to reset the repository."
                )
        info(f"{args.repo} is now at commit:")
        subprocess.run(["git", "log", "HEAD^1..HEAD", "--oneline"],
                       check=False, cwd=args.repo)
def pack(self):
    """Archive the embedded root as tar.bz2 and cpio.gz, and stage the built
    kernel image next to the tarball when a kernel is configured."""
    emb_root = self.target_root
    if self.seed:
        # NOTE(review): joins target_root onto itself — presumably the embedded
        # root lives at <target_root>/<target_root> inside the seed chroot;
        # confirm against the build layout.
        emb_root = emb_root.pjoin(self.target_root)
    basedir = util.Path( os.path.dirname(self.tarpath) )
    util.mkdir(basedir)
    archive = tarfile.open(self.tarpath, 'w:bz2')
    archive.add(emb_root, arcname = '/', recursive = True )
    archive.close()
    # The cpio archive must be created from inside the root; save/restore cwd.
    curdir = os.path.realpath(os.curdir)
    os.chdir(emb_root)
    util.cmd('find ./ | cpio -H newc -o | gzip -c -9 > %s' % (self.cpiopath))
    os.chdir(curdir)
    if self.kernel:
        # Resolve the kernel image behind the /boot/kernel symlink, possibly
        # inside the seed's target root.
        r = util.Path('/')
        if self.seed:
            r = self.target_root
        r = r.pjoin('/tmp/inhibitor/kernelbuild')
        kernel_link = r.pjoin('/boot/kernel')
        kernel_path = os.path.realpath( kernel_link )
        # lexists: remove even a dangling symlink before re-creating it.
        if os.path.lexists(self.kernlinkpath):
            os.unlink(self.kernlinkpath)
        shutil.copy2(kernel_path, basedir.pjoin( os.path.basename(kernel_path) ))
        os.symlink(os.path.basename(kernel_path), self.kernlinkpath)
def run(workdir, proj, bug, onlyFailing, onlyRelevant, onlyTest):
    """Compile a Defects4J buggy checkout and run its tests.

    Exactly one test mode applies (in priority order): onlyFailing runs each
    triggering test, onlyRelevant runs relevant tests (-f), onlyTest runs one
    named test, else the full suite.  Raises ValueError on [ERROR] log lines.
    """
    cmd("{0}/defects4j/framework/bin/defects4j compile -w {0}/data/{1}/{2}b".
        format(workdir, proj, bug))
    try:
        os.makedirs("{0}/result/".format(workdir))
    except OSError as e:
        # Ignore only "already exists"; re-raise anything else.
        if e.errno != errno.EEXIST:
            raise
    if onlyFailing:
        # Run each originally-failing (triggering) test individually.
        for t in failing_test.get_raw(workdir, proj, bug):
            cmd("{0}/defects4j/framework/bin/defects4j test -w {0}/data/{1}/{2}b -t {3}"
                .format(workdir, proj, bug, t))
    elif onlyRelevant:
        cmd("{0}/defects4j/framework/bin/defects4j test -w {0}/data/{1}/{2}b -f"
            .format(workdir, proj, bug))
    elif onlyTest:
        cmd("{0}/defects4j/framework/bin/defects4j test -w {0}/data/{1}/{2}b -t {3}"
            .format(workdir, proj, bug, onlyTest))
    else:
        cmd("{0}/defects4j/framework/bin/defects4j test -w {0}/data/{1}/{2}b".
            format(workdir, proj, bug))
    # NOTE(review): opened relative to the cwd while everything above uses
    # {workdir}/result/ — confirm the tests really write ./result/error.log.
    with open("result/error.log") as log:
        for line in log:
            if "[ERROR]" in line:
                raise ValueError("Error in log: " + line)
def diff_hw_config(hardware_cfg: Path) -> None:
    """Diff changes if we updated the hardware configuration.

    :param hardware_cfg: path to ``hosts/$(hostname)-hardware-configuration.nix``
    """
    # Show the user what command this is equivalent to.
    info("Diff if `nixos-generate-config` was run:")
    cmd(
        f"nixos-generate-config --show-hardware-config "
        f"| diff --ignore-all-space {hardware_cfg} - "
        f"| delta"
    )
    generated = get_output(["nixos-generate-config", "--show-hardware-config"])
    diff_args = [
        "diff",
        "--report-identical-files",
        "--new-file",
        "--unified",
        "--ignore-all-space",
        str(hardware_cfg),
        "-",
    ]
    # diff exits 1 when files differ; that's expected, not an error.
    diff_text = get_output(diff_args, input=generated, ok_returncodes=[0, 1])
    # Pretty-print the diff through delta; tolerate its failure.
    delta_proc = subprocess.run(
        ["delta"], input=diff_text, encoding="utf-8", check=False
    )
    if delta_proc.returncode != 0:
        warn(f"delta exited with non-zero return code {delta_proc.returncode}")
def restore(self, source, specificFileToRestore='', restoreTo=''):
    """source: name of source group to restore
    specificFileToRestore: relative path of a specific file/directory to restore
    restoreTo: path string of restore location

    Runs one duplicity restore per item in the source group (duplicity does
    not honor include/exclude during restore, so --file-to-restore is used).
    """
    sourceGrp = self.sourceGroups[source]
    if sourceGrp is None:
        raise Exception("Cannot restore: no such source group "+
                        "associated with link - {0}".format(source))
    os.environ['PASSPHRASE'] = getpwd()
    options = []
    # duplicity currently doesn't support the 'include'
    # and 'exclude' options when performing a restore
    # these options are what allow us to backup in a single command above.
    # instead, we use a series of multiple commands with 'file-to-restore' option
    # to perform the same role.
    # BUG FIX: was `souceGrp.items` (NameError) and shadowed the `source`
    # parameter with the loop variable.
    for item in sourceGrp.items:
        path = item.path
        # We must make it a relative path (i.e. remove leading slash)
        if path.startswith('/'):
            path = path[1:]
        cmdList = ["duplicity", "restore",
                   "--file-to-restore='{0}'".format(path),
                   self.getTarget().path,
                   # BUG FIX: `is ''` compared identity, not equality.
                   '/' if restoreTo == '' else restoreTo]
        cmd(cmdList)
    # XXX
    #if specificFileToRestore is not '':
    #    options.append('--file-to-restore="{0}"'.format(specificFileToRestore))
    del os.environ['PASSPHRASE']
    self.doneSomething()
def sync(self):
    """Recreate the build directory and rsync the (filtered) source cache
    into it, honoring include/exclude patterns."""
    # BUG FIX: the symlink check must come first.  A symlink to an existing
    # directory satisfies os.path.exists(), and shutil.rmtree() raises
    # OSError when given a symlink — so the old exists-then-islink order
    # crashed on that case.  islink also catches dangling links.
    if os.path.islink(self.builddir):
        os.unlink(self.builddir)
    elif os.path.exists(self.builddir):
        shutil.rmtree(self.builddir)
    os.makedirs(self.builddir)
    # Build a single string of repeated --exclude options for rsync.
    exclude_cmd = ''
    if self.exclude:
        for i in self.exclude:
            exclude_cmd += " --exclude='%s'" % i
    if self.include:
        # Copy only the included patterns, preserving their relative layout.
        for pattern in self.include:
            paths = [self.src.cachedir.pjoin(pattern)]
            if '*' in pattern:
                paths = glob.glob(self.src.cachedir.pjoin(pattern))
            for path in paths:
                dest = path.replace(self.src.cachedir, self.builddir)
                # lexists: even a dangling symlink counts as "parent exists".
                if not os.path.lexists( os.path.dirname(dest) ):
                    os.makedirs( os.path.dirname(dest) )
                util.cmd('rsync -a %s %s/ %s/' % ( exclude_cmd, path, dest ))
    else:
        # No includes: mirror the whole cache.
        util.cmd('rsync -a %s %s/ %s/' % (exclude_cmd, self.src.cachedir, self.builddir))
def checkout(workdir, proj, bug):
    """Check out the buggy ('b') version of a Defects4J project into
    {workdir}/data/{proj}/{bug}b, skipping if the directory already exists."""
    dir = "{0}/data/{1}/{2}b".format(workdir, proj, bug)
    if not os.path.exists(dir):
        os.makedirs(dir)
        cmd("{0}/defects4j/framework/bin/defects4j checkout -p {1} -v {2}b -w {3}"
            .format(workdir, proj, bug, dir))
    else:
        print "Skip checkout because {0} already exists".format(dir)
def check_migrations_test():
    """Run several benches against a server using a temporary data dir,
    then check that migrate.py succeeds on that data."""
    data_dir = tempfile.mkdtemp(prefix='mzbench_data_')
    with start_mzbench_server(custom_data_location=data_dir):
        for _ in range(5):
            run_successful_bench(scripts_dir + 'correct_script.erl')
        try:
            cmd(mzbench_dir + '/bin/migrate.py ' + data_dir)
        finally:
            # Always clean up the temporary data directory.
            shutil.rmtree(data_dir)
def rm_tabsint_plugin_dependencies(pluginDict):
    """ remove cordova dependencies part of plugins from config """
    try:
        for name, plugin in pluginDict.iteritems():
            if "cordova" in plugin:
                ut.cmd("npm run cordova --silent -- plugin rm " + plugin["cordova"]["package"], force=True)
    # BUG FIX: bare `except:` plus the handler read plugin["package"], a key
    # that does not exist (the package id lives at plugin["cordova"]["package"]),
    # so the handler itself raised KeyError and masked the real failure.
    except Exception:
        pkg = plugin["cordova"]["package"]
        sys.exit(
            "[ERROR]: Error while trying to uninstall plugin: {0}. Try manually uninstalling using 'npm run cordova --silent -- plugin rm {1}'".format(
                pkg, pkg
            )
        )
def merge_packages(self):
    """Emerge the configured package list (inside a chroot when seeded)."""
    script = '%s/inhibitor-run.sh run_emerge --newuse %s' % (
        self.env['INHIBITOR_SCRIPT_ROOT'],
        ' '.join(self.package_list))
    # Unseeded builds run directly on the host.
    if not self.seed:
        util.cmd(script, env=self.env)
        return
    util.chroot(
        path=self.target_root,
        function=util.cmd,
        fargs={'cmdline': script, 'env': self.env},
        failuref=self.chroot_failure,
    )
def update_hw_config_force(hardware_cfg: Path) -> None:
    """Generate and replace the hardware configuration.

    Performs no safety checks, but doesn't write if ``DRY_RUN`` is true.

    :param hardware_cfg: Path to ``hosts/$(hostname)-hardware-configuration.nix``
        file to replace.
    """
    verb = "Updating " if hardware_cfg.exists() else "Generating "
    info(verb + p(hardware_cfg))
    # Display the equivalent shell command for the user.
    cmd(f"nixos-generate-config --show-hardware-config > {hardware_cfg}")
    generated = get_output(
        ["nixos-generate-config", "--show-hardware-config"]
    )
    if DRY_RUN:
        return
    hardware_cfg.write_text(generated)
def add_tabsint_plugin_dependencies(pluginDict):
    """ add dependent cordova plugins for tabsint plugins listed in config.json"""
    try:
        for name, plugin in pluginDict.iteritems():
            if "cordovaPlugins" in plugin:
                for p in plugin["cordovaPlugins"]:
                    ut.cmd("npm run cordova --silent -- plugin add {0}".format(p))
                    ut.log.info(
                        '[BUILD]: Successfully installed the cordova plugin "{0}" required by tabsint plugin "{1}"'.format(
                            p, name
                        )
                    )
    except Exception as e:
        # Roll back any partially-installed plugins before aborting.
        rm_tabsint_plugins()
        # BUG FIX: error message was garbled ('trying to "cordovaPlugins" from with error').
        sys.exit('[ERROR]: Error while trying to add "cordovaPlugins" with error: {0}'.format(str(e)))
def unpack_seed(self):
    """Unpack <seed>.tar.bz2 into the seed directory (first run only) and
    rsync the seed into the target root."""
    if not os.path.isdir(self.seed):
        if os.path.exists(self.seed):
            # A stale non-directory (file or link) at the seed path is removed.
            os.unlink(self.seed)
        seedfile = self.seed + ".tar.bz2"
        util.info("Unpacking %s" % seedfile)
        os.makedirs(self.seed)
        try:
            util.cmd("tar -xjpf %s -C %s/" % (seedfile, self.seed))
        except:
            # Leave no partially-extracted seed behind; then re-raise.
            shutil.rmtree(self.seed)
            raise
    util.info("Syncing %s to %s" % (self.seed.dname(), self.target_root.dname()))
    # --delete keeps the target root an exact mirror of the seed.
    util.cmd("rsync -a --delete %s %s" % (self.seed.dname(), self.target_root.dname()))
def start_mzbench_server(custom_data_location=None):
    """Start the MZBench API server (generator body, used as a contextmanager).

    custom_data_location: optional bench_data_dir to put in the config.
    Yields while tests run, dumps the newest erlang log on failure, and
    always stops the server.
    """
    # Node source: rsync path, git repo, or server default when neither is set.
    if 'MZBENCH_RSYNC' in os.environ:
        node_location_param = '{{node_rsync, "{0}"}},'.format(
            os.environ['MZBENCH_RSYNC'])
    elif 'MZBENCH_REPO' in os.environ:
        node_location_param = '{{node_git, "{0}"}},'.format(
            os.environ['MZBENCH_REPO'])
    else:
        node_location_param = ''
    if custom_data_location:
        custom_data_location_param = '{{bench_data_dir, "{0}"}},'.format(
            custom_data_location)
    else:
        custom_data_location_param = ''
    with open(dirname + "/test_server.config", "w") as config:
        # Double braces produce literal Erlang tuples; ports 0 = auto-assign.
        config.write(
            '[{{mzbench_api, [{0} {1} {{node_log_port, 0}}, {{node_log_user_port, 0}}, {{node_management_port, 0}}, {{node_interconnect_port, 0}}]}}].'
            .format(node_location_param, custom_data_location_param))
    with open('{0}/test_server.config'.format(dirname), 'r') as f:
        print(f.read())
    cmd('{0} start_server --config {1}/test_server.config'.format(
        mzbench_script, dirname))
    try:
        time.sleep(3) # give server some time to start
        yield
    except:
        print ''
        print '-------------------- >> begin server logs << ---------------------'
        # NOTE(review): mzbench_dir is concatenated without a separator —
        # presumably it already ends with '/'; confirm at its definition.
        logdir = os.path.join(mzbench_dir + 'server/_build/default/rel/mzbench_api/log')
        logfiles = [logfile for logfile in os.listdir(logdir)]
        # Pick the most recently modified erlang* log.
        logfile = sorted([
            os.path.join(logdir, l) for l in logfiles if l.startswith('erlang')
        ], key=os.path.getmtime, reverse=True)[0]
        with open(logfile) as f:
            for line in f:
                print line.rstrip().replace('\\n', '\n')
        print '-------------------- >> end server logs << ---------------------'
        print ''
        raise
    finally:
        cmd('{0} stop_server'.format(mzbench_script))
def backup(self, incremental=False):
    """Run a duplicity backup (full or incremental) of every source group's
    items to the target URL, using a temporary PASSPHRASE from getpwd()."""
    # Collect one --include option per item across all source groups.
    include_opts = []
    for k, group in self.sourceGroups.items():
        include_opts.extend('--include={0}'.format(item.path)
                            for item in group.items)
    os.environ['PASSPHRASE'] = getpwd()
    args = [
        "duplicity",
        "incremental" if incremental else "full",
        "--name='{0}'".format(self.targetGroup.name),
    ]
    args.extend(include_opts)
    # Exclude everything not explicitly included, back up from /.
    args.extend(["--exclude=**", "/", self.getTarget().geturl()])
    cmd(args)
    del os.environ['PASSPHRASE']
    self.doneSomething()
def devtool_list_templates_test():
    """Check `mzbench list_templates` reports exactly the worker_templates dir."""
    templates = os.listdir(mzbench_dir + "worker_templates")
    # filter(None-ish) drops empty lines from the command output.
    got_templates = filter(lambda x: x, cmd(mzbench_dir + "bin/mzbench list_templates").split("\n"))
    if sorted(templates) != sorted(got_templates):
        # Print both sides before the assert so the failure is diagnosable.
        print sorted(templates)
        print sorted(got_templates)
    assert sorted(templates) == sorted(got_templates)
def main(args: Optional[Args] = None) -> None:
    """Entry point."""
    if args is None:
        args = Args.parse_args()
    # Okay, so we don't actually use `os.chdir` here. Why? If we split a panel
    # while rebuilding (or open a new window), tmux starts the new shell in the
    # current process' cwd. Therefore, so we don't end up accidentally mucking
    # around in `/etc/nixos`, we don't change the cwd and instead use
    # `cwd=args.repo` for `subprocess.run` invocations.
    cmd(f"cd {p(args.repo)}")
    if args.fix_full_boot:
        fix_full_boot(args)
    pull(args)
    rebuild(args.rebuild_args, args.sudo_prefix, args.repo)
def devtool_list_templates_test():
    """Check `mzbench list_templates` reports exactly the worker_templates dir."""
    templates = os.listdir(mzbench_dir + 'worker_templates')
    # filter(None-ish) drops empty lines from the command output.
    got_templates = filter(
        lambda x: x,
        cmd(mzbench_dir + 'bin/mzbench list_templates').split('\n'))
    if sorted(templates) != sorted(got_templates):
        # Print both sides before the assert so the failure is diagnosable.
        print sorted(templates)
        print sorted(got_templates)
    assert sorted(templates) == sorted(got_templates)
def get_tabsint_plugin_docs(name, src, version=None):
    """ include tabsint plugin documentation

    Fetches docs from a git checkout or local folder and copies index.rst/.md
    (plus any same-named ancillary directory) into the userguide tree.
    """
    def copy_userguide(path, ext="rst"):
        # Copy index.<ext> into the userguide as plugin-<name>.<ext>.
        installPath = "../docs/userguide/src/docs/plugins"
        try:
            os.mkdir(installPath)
        except:
            # best-effort: the directory may already exist
            pass
        try:
            shutil.copy("{0}/index.{1}".format(path, ext), "{0}/plugin-{1}.{2}".format(installPath, name, ext))
            # copy any other ancillary files - must be named the same as the plugin!
            if name in os.listdir(path):
                shutil.copytree("{0}/{1}".format(path, name), "{0}/{1}/".format(installPath, name))
        except Exception as e:
            sys.exit('[ERROR]: Failed to copy docs for plugin "{0}" to userguide. Error: {1} '.format(name, str(e)))
    # git files
    if any(s in src for s in ["https://", ".git"]):
        [repo, subdir] = ut.checkout(src, tag=version)
        docPath = ".tmp/{0}/{1}".format(repo, subdir)
    # local files
    else:
        src = "../" + src
        ut.check_tag(src, version)
        # Normalize away a trailing slash.
        if src.endswith("/"):
            docPath = src[:-1]
        else:
            docPath = src
    # user guide
    if "index.rst" in os.listdir(docPath):
        copy_userguide(docPath)
    elif "index.md" in os.listdir(docPath):
        copy_userguide(docPath, ext="md")
    ut.cmd("rm -rf .tmp", force=True) # remove temp git directory, if its there
    ut.log.info('[BUILD]: Successfully retrieved docs for tabsint plugin "{0}"'.format(name))
def get_tabsint_plugin(name, src, version=None):
    """ get tabsint plugin from git repository or local folder"""
    def copy_to_plugins(path):
        # Replace any existing copy of the plugin wholesale.
        dest = "../www/tabsint_plugins/{0}".format(name)
        shutil.rmtree(dest, True)
        shutil.copytree(path, dest)
    if any(marker in src for marker in ["https://", ".git"]):
        # git repos
        [repo, subdir] = ut.checkout(src, tag=version)
        copy_to_plugins(".tmp/{0}/{1}".format(repo, subdir))
        ut.cmd("rm -rf .tmp")  # remove temp git directory
    else:
        # local files
        src = "../" + src  # make path relative to top level directory
        ut.check_tag(src, version)
        copy_to_plugins(src)
    ut.log.info('[BUILD]: Successfully retrieved the source for tabsint plugin "{0}"'.format(name))
def get_raw(workdir, proj, bug):
    """Return the triggering (root-cause) test names for a Defects4J bug,
    parsed from `defects4j info` output."""
    output = cmd(
        "{0}/defects4j/framework/bin/defects4j info -p {1} -b {2}".format(
            workdir, proj, bug))
    # Capture the block between the triggering-tests header and the next
    # section; DOTALL lets (.*) span lines.
    tests = re.findall(
        """Root cause in triggering tests: (.*)
--------------------------------------------------------------------------------
List of modified sources:""", output, re.DOTALL)[0].split('\n')
    # Test lines look like ' - pkg.Class::testName'; strip the marker.
    tests = filter(lambda x: x.startswith(' - '), tests)
    return map(lambda x: x.replace(' - ', ''), tests)
def render(self, font):
    """Return the cached rendered text, re-rendering (and re-measuring its
    pixel width) under the cache lock when marked invalid."""
    with self.cache_lock:
        if self.invalid:
            self.cache = self.do_render()
            # Strip markup tags (<...>) and ^name(...) directives so the
            # width measurement covers visible text only.
            text_no_markup = re.sub('\<[^\>]+\>|\^[^\(]*\([^\)]*\)', '', self.cache)
            self.cache_width = int(cmd('textwidth', font, text_no_markup))
            self.invalid = False
        return self.cache
def list_plugins():
    """ retrieve list of plugins and versions"""
    try:
        [code, stdout] = ut.cmd("npm run cordova --silent -- plugins ls", suppress=True)
        # Normalize output to a flat list, dropping empty entries.
        plugins = [p for p in stdout.replace("\n", ",").split(",") if p != ""]
        # Keep only the package id (text before the first ' "').
        plugins = [p.split(' "')[0] for p in plugins]
        return plugins
    except:
        # Deliberate catch-all: any failure aborts with guidance.
        sys.exit(
            "[ERROR]: Error while trying to get a list of plugins from cordova. Try running 'npm run cordova --silent -- plugins ls', or for deeper investigation see the function 'list_plugins()' in /bin/util.py"
        )
def devtool_run_local_tests():
    """Exercise `mzbench validate`/`run_local` on good scripts and assert
    that run_local fails on scripts with syntax/semantic errors."""
    run_erl_and_bdl('validate', 'loop_rate')
    run_erl_and_bdl('validate', 'env', ' --env pool_size=20 --env jozin=jozin --env wait_ms=100')
    run_erl_and_bdl('run_local', 'loop_rate')
    run_erl_and_bdl('run_local', 'data_script')
    # Broken scripts must make run_local exit non-zero (was two copy-pasted
    # try/except blocks; factored into one helper).
    _assert_run_local_fails('syntax_error.erl')
    _assert_run_local_fails('semantic_error.erl')

def _assert_run_local_fails(script):
    """Run `mzbench run_local` on *script* and assert it exits non-zero."""
    try:
        cmd(mzbench_dir + 'bin/mzbench run_local ' + scripts_dir + script)
        assert False
    except subprocess.CalledProcessError:
        pass
def run(workdir, proj, bug, mem):
    """Run the falo fault-localization jar on a Defects4J bug.

    workdir/proj/bug: locate the bug's info files under data/info/.
    mem: max JVM heap in GB, or None for the JVM default.
    """
    tests = failing_test.get_with_lines(workdir, proj, bug)
    tests = ";".join(tests)
    # Ensure result/info exists once (was re-attempted per file inside the loop).
    try:
        os.makedirs("{0}/result/info".format(workdir))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    info_dir = "{0}/data/info/{1}/{2}".format(workdir, proj, bug)
    for filename in os.listdir(info_dir):
        src = "{0}/data/info/{1}/{2}/{3}".format(workdir, proj, bug, filename)
        dest = "{0}/result/info/{1}".format(workdir, filename)
        shutil.copy(src, dest)
    # IMPROVED: `mem == None` -> `mem is None`, and the two near-identical
    # command strings are merged (only the -Xmx option differs).
    java = "java" if mem is None else "java -Xmx{0}g".format(mem)
    cmd("{0} -jar {1}/falo/falo/target/falo-0.1-jar-with-dependencies.jar '{1}' '{2}'"
        .format(java, workdir, tests))
def start_mzbench_server():
    """Start the MZBench API server (generator body, used as a contextmanager):
    write test_server.config, start the server, yield while tests run, dump
    console.log on failure, always stop the server."""
    # Node source: rsync path, git repo, or server default when neither is set.
    if 'MZBENCH_RSYNC' in os.environ:
        node_location_param = '{{mzbench_rsync, "{0}"}}'.format(os.environ['MZBENCH_RSYNC'])
    elif 'MZBENCH_REPO' in os.environ:
        node_location_param = '{{mzbench_git, "{0}"}}'.format(os.environ['MZBENCH_REPO'])
    else:
        node_location_param = ''
    with open(dirname + "/test_server.config", "w") as config:
        # Double braces produce literal Erlang tuples in the written config.
        config.write('[{{mzbench_api, [{0}]}}].'.format(node_location_param))
    with open('{0}/test_server.config'.format(dirname), 'r') as f:
        print(f.read())
    cmd('{0} start_server --config {1}/test_server.config'.format(mzbench_script, dirname))
    try:
        yield
    except:
        print ''
        print '-------------------- >> begin server logs << ---------------------'
        print cmd('cat ' + mzbench_dir + '/server/log/console.log').replace('\\n', '\n')
        print '-------------------- >> end server logs << ---------------------'
        print ''
        raise
    finally:
        cmd('{0} stop_server'.format(mzbench_script))
def start_mzbench_server():
    """Start the MZBench API server (generator body, used as a contextmanager):
    write test_server.config, start the server, yield while tests run, dump
    console.log on failure, always stop the server."""
    # Node source: rsync path, git repo, or server default when neither is set.
    if 'MZBENCH_RSYNC' in os.environ:
        node_location_param = '{{mzbench_rsync, "{0}"}}'.format(
            os.environ['MZBENCH_RSYNC'])
    elif 'MZBENCH_REPO' in os.environ:
        node_location_param = '{{mzbench_git, "{0}"}}'.format(
            os.environ['MZBENCH_REPO'])
    else:
        node_location_param = ''
    with open(dirname + "/test_server.config", "w") as config:
        # Double braces produce literal Erlang tuples in the written config.
        config.write('[{{mzbench_api, [{0}]}}].'.format(node_location_param))
    with open('{0}/test_server.config'.format(dirname), 'r') as f:
        print(f.read())
    cmd('{0} start_server --config {1}/test_server.config'.format(
        mzbench_script, dirname))
    try:
        yield
    except:
        print ''
        print '-------------------- >> begin server logs << ---------------------'
        print cmd('cat ' + mzbench_dir + '/server/log/console.log').replace(
            '\\n', '\n')
        print '-------------------- >> end server logs << ---------------------'
        print ''
        raise
    finally:
        cmd('{0} stop_server'.format(mzbench_script))
def merge_kernel(self):
    """Build the kernel via kernel.sh (inside a chroot when seeded) and copy
    the resulting modules/firmware into the embedded root filesystem."""
    args = ['--build_name', self.build_name,
        '--kernel_pkg', '\'%s\'' % (self.kernel.kernel_pkg,)]
    cmdline = '%s/kernel.sh %s' % (
        self.env['INHIBITOR_SCRIPT_ROOT'],
        ' '.join(args)
    )
    env = {}
    env.update(self.env)
    # The kernel installs into a scratch ROOT, not the target root directly.
    env['ROOT'] = '/tmp/inhibitor/kernelbuild'
    if self.seed:
        util.mkdir( self.target_root.pjoin(env['ROOT']) )
        util.chroot(
            path = self.target_root,
            function = util.cmd,
            fargs = {'cmdline':cmdline, 'env':env},
            failuref = self.chroot_failure,
        )
        # Grab any modules or firmware and put them into the embedded root fs.
        for d in ('modules', 'firmware'):
            util.chroot(
                path = self.target_root,
                function = util.cmd,
                fargs = {'cmdline': 'rsync -a --delete-after %s %s/' % (
                    '/tmp/inhibitor/kernelbuild/lib/%s' % (d,),
                    self.target_root.pjoin('lib'))},
                failuref = self.chroot_failure,
            )
    else:
        # NOTE(review): env is passed positionally here but by keyword
        # elsewhere — confirm util.cmd's second positional arg is env.
        util.cmd( cmdline, env )
        # Grab any modules or firmware and put them into the embedded root fs.
        for d in ('modules', 'firmware'):
            util.cmd('rsync -a --delete-after %s %s/' % (
                '/tmp/inhibitor/kernelbuild/lib/%s' % (d,),
                self.target_root.pjoin('lib')))
def systemRefresh():
    """Pull the latest system spice, bump the stored version number, and
    rebuild the system tarball with the new version file appended."""
    log('UPDATING SYSTEM SPICE')
    chdir('/home/ec2-user/spicerackclient')
    cmd('git pull')
    cmd('tar -cvf /home/ec2-user/system.tar system')
    chdir('/home/ec2-user')
    # Bump the persisted version counter (defaults to 0 on first run).
    version = 1 + int(dget('systemversion', 0))
    put('systemversion', version)
    with open('systemversion.txt', 'w') as f:
        f.write(str(version))
    # Ship the version marker inside the tarball as well.
    cmd('tar --append --file=system.tar systemversion.txt')
    log('UPDATED SYSTEM SPICE TO VERSION: %s' % version)
    return 'success'
def init(self):
    """Prepare the git cache: reset and update an existing clone (or clone
    fresh), then pin the requested revision on a local `inhibitor` branch."""
    self.clean_cache()
    if os.path.isdir(self.gitdir):
        # Existing clone: discard local changes/untracked files, fast-forward.
        util.cmd('git reset --hard HEAD', env=self.env, chdir=self.cachedir)
        util.cmd('git clean -f -d -x', env=self.env, chdir=self.cachedir)
        util.cmd('git checkout master', env=self.env, chdir=self.cachedir)
        util.cmd('git pull', env=self.env, chdir=self.cachedir)
    else:
        util.cmd('git clone %s %s' % (self.src, self.cachedir))
    # Drop any stale inhibitor branch before (re)creating it.
    _, branches = util.cmd_out('git branch -l', env=self.env, chdir=self.cachedir)
    if 'inhibitor' in branches:
        util.cmd('git branch -D inhibitor', env=self.env, chdir=self.cachedir)
    if self.rev != 'HEAD':
        util.cmd('git checkout -b inhibitor %s' % self.rev, env=self.env, chdir=self.cachedir)
    else:
        # BUG FIX: this cmd_out previously omitted chdir=self.cachedir (every
        # other git call here passes it), so the recorded revision came from
        # whatever repository the caller's cwd happened to be in.
        _, self.rev = util.cmd_out('git rev-parse HEAD', env=self.env, chdir=self.cachedir)
        self.rev = self.rev[:7]
#!/usr/bin/env python import os, util util.chdir() ports = util.get_ports() if not os.path.exists('rethinkdb_data'): util.cmd('rethinkdb create -d "rethinkdb_data"') util.cmd('rethinkdb serve --cluster-port {cluster_port} --driver-port {driver_port} --no-http-admin'.format( driver_port=ports['rethinkdb'], cluster_port=ports['rethinkdb_cluster']))
#!/usr/bin/env python import os, sys path = os.path.split(os.path.realpath(__file__))[0] os.chdir(path) sys.path.insert(0, path) import util util.chdir() util.cmd( "cd ../../ && git submodule update --init && . smc-env && cd examples && env OUTDIR=../webapp-lib/examples make && cd .. && npm run webpack-watch" )
#!/usr/bin/env python import util util.chdir() util.cmd("cd ../../; npm run webpack-watch-map")
import sys
sys.path.append("ext")
import webbrowser
import os
import util, calc, twitter
from time import strftime
util.cls()
# Read a single command line up front and normalize it.
txt = raw_input("> ").lower()
opt = util.cmd(txt)
class Bot():
    # NOTE(review): this loop lives in the class body, so it runs once at
    # class-definition time, and `opt` is never re-read inside the loop —
    # unless the first input contains "bye" this appears to loop forever.
    # Confirm whether more of this loop (including re-prompting) exists
    # beyond this excerpt.
    while "bye" not in opt:
        if "hi" in opt:
            print "Hi there! The time is " + strftime("%H:%M:%S")
        elif "today" in opt:
            print "Now is", strftime("%A, %d %b %Y %X")
        elif "irc" in opt:
            os.system("irssi")
        elif "thanks" in opt:
            print "You're welcome"
        elif "google" in opt:
            # Text after the keyword is the search query.
            keyword = util.content(opt,"google")
            if len(keyword) >= 1:
                util.google(keyword)
            else:
                print "No input entered."
        elif "wiki" in opt:
            keyword = util.content(opt,"wiki")
            if len(keyword) >= 1:
                util.wiki(keyword)
#!/usr/bin/env python import os, sys path = os.path.split(os.path.realpath(__file__))[0] os.chdir(path) sys.path.insert(0, path) import util util.chdir() util.cmd( "cd ../../ && git submodule update --init && . smc-env && npm run webpack-watch" )
def _cmd_list_network(self):
    """List configured wpa_supplicant networks via `wpa_cli list_networks`."""
    return cmd("sudo wpa_cli list_networks")
def process(self, opt, arg, cli):
    """Join *arg* into one shell command, run it via cmd, and log the output."""
    # FIXME: the `cd` command has no effect (same behavior in ipython),
    # so it is deliberately not handled here.  (translated from Chinese)
    log.log(cmd(' '.join(arg)))
def _cmd_reconfigure_network(self):
    """Make wpa_supplicant re-read its configuration via `wpa_cli reconfigure`."""
    return cmd("sudo wpa_cli reconfigure")
def _cmd_add_network(self):
    """Create a new empty wpa_supplicant network entry via `wpa_cli add_network`."""
    return cmd("sudo wpa_cli add_network")
def log_compression_test():
    """Verify the server serves bench logs deflate-compressed over HTTP."""
    bench_id = run_successful_bench(scripts_dir + 'correct_script.erl')
    # HEAD request only — we just need the response headers.
    log_cmd = 'curl --head -X GET http://localhost:4800/log?id={0}'.format(bench_id)
    assert("content-encoding: deflate" in cmd(log_cmd))
def run_bench(name=None, worker_package_with_default_scenario=None, nodes=None,
              workers_per_node=None, env={}, email=None, should_fail=False,
              max_retries=2, exclusive_node_usage=False,
              expected_log_message_regex=None, check_log_function=None,
              post_start=None):
    """Start a bench (by script name or worker package), wait for it, and
    optionally validate its log; retries up to max_retries times.

    should_fail inverts the success criterion.  Returns the bench id, or
    raises RuntimeError when every attempt ends with the unexpected outcome.
    NOTE: env={} is a mutable default, but it is only ever read here.
    """
    email_option = ('--email=' + email) if email else ''
    if workers_per_node:
        nodes_option = '--workers_per_node ' + str(workers_per_node)
    else:
        if nodes:
            nodes_option = '--nodes ' + ','.join(nodes)
        else:
            nodes_option = '--nodes 1'
    env_option = ' '.join(('--env={0}={1}'.format(k, v) for k, v in env.iteritems()))
    def run():
        # One attempt: start the bench, optionally fire post_start once it is
        # running, then wait for completion.
        if 'worker_branch' in env:
            node_commit_arg = '--node_commit={0}'.format(env['worker_branch'])
        else:
            node_commit_arg = ''
        flags = ' '.join([
            '--host=localhost:4800',
            '--exclusive_node_usage=' + ('true' if exclusive_node_usage else 'false'),
            node_commit_arg,
            nodes_option,
            env_option,
            email_option])
        if name is not None:
            invocation = mzbench_dir + 'bin/mzbench ' + flags + ' start ' + name
        elif worker_package_with_default_scenario is not None:
            invocation = mzbench_dir + 'bin/mzbench ' + flags + ' start_default_scenario_of_worker ' + worker_package_with_default_scenario
        else:
            raise RuntimeError('Neither script filename nor default scenario package provided.')
        start = subprocess.Popen(shlex.split(invocation.encode('ascii')),
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        start_out, start_err = start.communicate()
        try:
            # The start command prints a JSON object containing the bench id.
            bench_id = json.loads(start_out)['id']
        except Exception:
            print 'mzbench returned invalid json: \nCommand: {0}\nOutput: {1}\nStderr: {2}'.format(invocation, start_out, start_err)
            raise
        if (post_start is not None) and wait_status(bench_id, 'running', 50):
            print "Calling post start for {0}".format(bench_id)
            post_start(bench_id)
        # Block until the bench finishes; exit code 0 means success.
        wait = subprocess.Popen(shlex.split(
            mzbench_dir + 'bin/mzbench --host=localhost:4800 status --wait {0}'.format(bench_id)),
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        wait.communicate()
        return (bench_id, wait.returncode == 0)
    attempt = 0
    while attempt < max_retries:
        print 'Attempt #{0}'.format(attempt)
        try:
            (bench_id, success) = run()
        except Exception as e:
            print "Unexpected error: {0}".format(e)
            bench_id, success = (None, False)
        # xor: outcome matched expectation (success when it should, or
        # failure when should_fail is set).
        if xor(success, should_fail):
            if not expected_log_message_regex and not check_log_function:
                # no need to check the log
                return bench_id
            log_cmd = mzbench_dir + 'bin/mzbench --host=localhost:4800 log {0}'.format(bench_id)
            log = cmd(log_cmd)
            re_match = None
            if expected_log_message_regex:
                # Accept either a pattern string or a precompiled regex.
                if isinstance(expected_log_message_regex, str) or isinstance(expected_log_message_regex, unicode):
                    regex = re.compile(expected_log_message_regex, re.DOTALL + re.UNICODE)
                else:
                    regex = expected_log_message_regex
                re_match = regex.search(log)
            maybe_error = None
            if check_log_function:
                maybe_error = check_log_function(log)
            # NOTE(review): when only check_log_function is given, re_match
            # stays None, so `not re_match` is always true and the branch
            # below would reference an unbound `regex` — confirm whether
            # callers always pass expected_log_message_regex too.
            if not re_match or maybe_error:
                print
                if maybe_error:
                    print "Log doesn't pass custom check:\n{0}\n\n".format(maybe_error)
                if not re_match:
                    print u"Log doesn't contain expected log message '{0}':\n".format(regex.pattern)
                print log
                raise RuntimeError
            return bench_id
        print 'Attempt #{0} for bench-id {1} unexpectedly {2}, retrying.'.format(attempt, bench_id, 'succeeded' if should_fail else 'failed')
        attempt += 1
    # Reached only when every attempt had the unexpected outcome.
    if (max_retries <= attempt):
        print('All {0} attempts failed'.format(max_retries))
        print('Log of the last attempt (bench {0}):'.format(bench_id))
        if bench_id is not None:
            log_cmd = mzbench_dir + 'bin/mzbench --host=localhost:4800 log {0}'.format(bench_id)
            print cmd(log_cmd).replace('\\n', '\n')
        raise RuntimeError('BenchId {0} for test {1} unexpectedly {2}'.format(
            bench_id, name, 'succeeded' if should_fail else 'failed'))
def do_render(self):
    """Return the formatted notmuch message count, or 'No mail' when zero."""
    num_msgs = cmd('notmuch', 'count', '--output=messages', self.query)
    return self.fmt.format(num_msgs) if int(num_msgs) > 0 else 'No mail'
#!/usr/bin/env python import os, json, socket, sys, util path = os.path.split(os.path.realpath(__file__))[0]; os.chdir(path); sys.path.insert(0, path) os.environ['DEVEL']='yes' os.environ['PGHOST']=os.path.join(path, 'postgres_data/socket') if 'TMUX' in os.environ: # see https://github.com/sagemathinc/cocalc/issues/563 del os.environ['TMUX'] util.chdir() ports = util.get_ports() base_url = util.base_url() hostname = 'localhost' cmd = "service_hub.py --dev --foreground --hostname={hostname} --port={hub_port} --proxy_port=0 --gap=0 --base_url={base_url} start".format( hostname = hostname, base_url = base_url, hub_port = ports['hub']) util.cmd(cmd)
def _build():
    """Run the patcher's Windows build script (build.bat)."""
    bdb = os.path.join(PATCHER, "build.bat")
    util.cmd(bdb)
#!/usr/bin/env python """ This is a script for starting postgres for development purposes on a laptop. """ import os, sys, time, util path = os.path.split(os.path.realpath(__file__))[0]; os.chdir(path); sys.path.insert(0, path) PG_DATA = os.path.join(path, "postgres_data") if not os.path.exists(PG_DATA): util.cmd("pg_ctl init -D '%s'"%PG_DATA) # Lock down authentication so it is ONLY via unix socket open(os.path.join(PG_DATA,'pg_hba.conf'), 'w').write( """ # This is safe since we only enable a socket protected by filesystem permissions: local all all trust # You can uncomment this and comment out the above if you want to test password auth. #local all all md5 """) # Make it so the socket is in this subdirectory, so that it is # protected by UNIX permissions. This approach avoids any need # for accounts/passwords for development and the Docker image. conf = os.path.join(PG_DATA, 'postgresql.conf') s = open(conf).read() s += '\n'
def add_tabsint_plugins(pluginDict, src=True, docs=True, cordova=True):
    """ add tabsint plugins listed in config.json

    For each plugin: apply defaults, resolve the version (debug mode ignores
    it), then optionally fetch source, docs, and the associated cordova
    plugin.  Any failure rolls back via rm_tabsint_plugins() and exits.
    """
    try:
        for name, plugin in pluginDict.iteritems():
            # defaults
            if "version" not in plugin:
                plugin["version"] = None
            if "debug" not in plugin:
                plugin["debug"] = False
            # version logic
            if plugin["debug"]:
                # Debug mode: pull HEAD / working copy, ignore "version".
                version = None
                ut.log.info(
                    '[WARNING]: Installing plugin "' + name + '" in debug mode. Ignoring version field and pulling from the HEAD or current working copy'
                )
            elif plugin["version"] == None or plugin["version"] == "":
                rm_tabsint_plugins()
                sys.exit(
                    '[ERROR]: No "version" value specified for the "{0}" plugin. This field is required.'.format(name)
                )
            else:
                version = plugin["version"]
            # src
            if src:
                if "src" in plugin:
                    get_tabsint_plugin(name, plugin["src"], version)
                else:
                    sys.exit(
                        '[ERROR]: Tabsint plugin "{0}" specified, but no source is included in the "src" field'.format(
                            name
                        )
                    )
            # docs
            if docs:
                if "docs" in plugin:
                    get_tabsint_plugin_docs(name, plugin["docs"], version)
                else:
                    ut.log.info('[WARNING]: Tabsint plugin "{0}" has no documentation'.format(name))
            # cordova plugins
            if cordova and "cordova" in plugin:
                if "https://" in plugin["cordova"]["src"]:
                    # remote git-based cordova plugins
                    if "#:/" in plugin["cordova"]["src"]:
                        # '#:/' marks a subdirectory ref: splice the version
                        # between '#' and ':/'.
                        url = "#{0}:/".format(version).join(plugin["cordova"]["src"].split("#:/"))
                    else:
                        url = "{0}#{1}".format(plugin["cordova"]["src"], version)
                else:
                    # local directory-based cordova plugins
                    url = plugin["cordova"]["src"]
                ut.log.debug("[DEBUG]: Downloading cordova plugin from url: {0}".format(url))
                ut.cmd("npm run cordova --silent -- plugin add " + url)
                ut.log.info(
                    '[BUILD]: Successfully installed the cordova plugin associated with tabsint plugin "{0}"'.format(
                        name
                    )
                )
        write_plugin_require(pluginDict)
    except Exception as e:
        # Roll back any partially-installed plugins before aborting.
        rm_tabsint_plugins()
        sys.exit("[ERROR]: Failure while adding tabsint plugins with exception: \n\n {0}".format(str(e)))
def _cmd_add_ssid(self, network_id, ssid):
    """Set the SSID (hex-encoded via toHex) for the given network id."""
    return cmd("sudo wpa_cli set_network {} ssid {}".format(network_id, toHex(ssid)))
def _cmd_save_config(self):
    """Persist the current wpa_supplicant configuration to disk."""
    return cmd("sudo wpa_cli save_config")
def restart_test():
    """Restart a finished bench and wait for the restarted run to complete."""
    bench_id = run_successful_bench(scripts_dir + 'correct_script.erl')
    restarted_id = restart_bench(bench_id)
    # --wait blocks until the restarted bench finishes.
    cmd(mzbench_dir + 'bin/mzbench status --wait {0}'.format(restarted_id))
def _cmd_add_psk(self, network_id, psk=None):
    """Configure the network's key management: open (no PSK) or WPA-PSK."""
    if psk is None or len(psk) == 0:
        # Open network: no passphrase.
        return cmd("sudo wpa_cli set_network {} key_mgmt NONE".format(network_id))
    else:
        # NOTE(review): the single '&' backgrounds the psk command so the
        # key_mgmt command runs concurrently; '&&' (sequential, on success)
        # was probably intended — confirm before changing.
        return cmd("sudo wpa_cli set_network {} psk '\"{}\"' & sudo wpa_cli set_network {} key_mgmt WPA-PSK ".format(network_id, psk, network_id))
#!/usr/bin/env python import os, sys path = os.path.split(os.path.realpath(__file__))[0]; os.chdir(path); sys.path.insert(0, path) import util util.chdir() util.cmd("cd ../../; npm run webpack-watch-map")
def _cmd_enable_network(self, network_id):
    """Enable the given network; silently returns None when network_id is
    not convertible to int (or the command raises)."""
    try:
        _id = int(network_id)
        return cmd("sudo wpa_cli enable_network {}".format(_id))
    except Exception as ex:
        # Best-effort: invalid ids are deliberately ignored.
        pass
""" Start """ import os, json, socket, sys, util path = os.path.split(os.path.realpath(__file__))[0]; os.chdir(path); sys.path.insert(0, path) os.environ['DEVEL']='yes' os.environ['PGHOST']=os.path.join(path, 'postgres_data/socket') util.chdir() ports = util.get_ports() base_url = util.base_url(ports['hub-share-2']) print('''\n\nBASE URL: {}\n\n'''.format(base_url)) share_path= os.path.join(os.environ['SMC_ROOT'], 'data/projects/[project_id]') cmd = "cd ../../ && . smc-env && service_hub.py --share_path={share_path} --foreground --hostname=0.0.0.0 --port=0 --share_port={share_port} --proxy_port=0 --gap=0 --base_url={base_url} start".format( base_url = base_url, share_port = ports['hub-share-2'], share_path = share_path) util.cmd(cmd)
def _cmd_select_network(self, network_id):
    """Select (connect to) the given network, disabling the others."""
    return cmd("sudo wpa_cli select_network {}".format(network_id))