def rebase(args, repo_name):
    target_branch = args.branch
    if is_rebase_active():
        _exit('rebase is already in progress')
    stashed = False
    if is_dirty():
        if utils.query_yes_no('stash modified files and continue?'):
            utils.cmd('git stash')
            stashed = True
        else:
            _exit('working directory is dirty and changes not stashed')
    try:
        utils.cmd(f'git rebase -i --autosquash {target_branch}', None, None)
    except subprocess.CalledProcessError:
        if utils.query_yes_no('rebase failed! abort?', default='no'):
            utils.cmd('git rebase --abort')
            if stashed:
                utils.cmd('git stash pop')
        else:
            utils.cmd('git status')
            print('1. Resolve conflicts in "Unmerged paths" file list')
            print('2. $ git add <conflict-files>')
            print('3. $ git rebase --continue')
            print('Start over using $ git rebase --abort')
            if stashed:
                print('Warning: you have stashed changes!')
            _exit('rebase did not complete!')
def buildTarBall(tarball, fileName, dirName):
    confdir = dirName + "/etc/hadoop/"
    utils.cmd("rm -rf " + dirName)
    utils.cmd("tar -xzf " + tarball)
    utils.cmd("cp conf/* " + confdir)
    utils.cmd("tar -czf " + fileName + " " + dirName)
    utils.cmd("rm -rf conf slaves " + dirName)
def after_refresh(self, it):
    mod, it = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    data = self.vars[0].get_text()
    vasca = self.vars[1].get_text()
    ph = self.vars[2].get_text()
    kh = self.vars[3].get_text()
    gh = self.vars[4].get_text()
    no = self.vars[5].get_text()
    no2 = self.vars[6].get_text()
    cond = self.vars[7].get_text()
    ammo = self.vars[8].get_text()
    ferro = self.vars[9].get_text()
    rame = self.vars[10].get_text()
    fosfati = self.vars[11].get_text()
    calcio = self.vars[12].get_text()
    magnesio = self.vars[13].get_text()
    densita = self.vars[14].get_text()
    limiti = self.vars[15].get_text()
    utils.cmd("update test set date='%(data)s', vasca='%(vasca)s', ph='%(ph)s', "
              "kh='%(kh)s', gh='%(gh)s', no='%(no)s', noo='%(no2)s', con='%(cond)s', "
              "amm='%(ammo)s', fe='%(ferro)s', ra='%(rame)s', fo='%(fosfati)s', "
              "calcio='%(calcio)s', magnesio='%(magnesio)s', densita='%(densita)s', "
              "limiti='%(limiti)s' where id=%(id)s" % vars())
    self.update_status(dbwindow.NotifyType.SAVE, _("Row aggiornata (ID: %d)") % id)
    self._check_iterator(mod, it)
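# A hedged sketch (not part of the original): the add_entry methods in this
# file already pass values positionally with '?' placeholders, so assuming
# utils.cmd forwards extra arguments as DB-API parameters, the update above
# could avoid %-interpolation and its quoting/SQL-injection problems:
#
#   utils.cmd("update test set date=?, vasca=?, ph=? where id=?",
#             data, vasca, ph, id)   # column list shortened for illustration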
def setup_mysql57():
    """
    Install mysql5.7 on the host machine.

    Why not install the database with docker? Testing uncovered many
    problems, and installing mysql5.7 via docker was eventually abandoned.
    """
    config_info = get_config_info()
    root_pwd = config_info['mysql']['root_password']
    print("docker install of mysql5.7: starting")
    cmd(("docker run --name mysql -d --net=host -e MYSQL_ROOT_PASSWORD={} "
         "-v /var/loonflow/db:/var/lib/mysql mysql:5.7.21").format(root_pwd))
    print("docker install of mysql5.7: done")
    print("sleeping 10 seconds to make sure mysql has started")
    time.sleep(10)
    db_name = 'loonflow'
    sql_file = '/opt/loonflow/docker_compose_deploy/loonflow_shutongflow/loonflow-web/loonflow_demo_init.sql'
    sql_to_docker_mysql(root_pwd, db_name, sql_file)
    print("sleeping 10 seconds to make sure the sql import has finished")
    time.sleep(10)
    db_name = 'shutongflow'
    sql_file = '/opt/loonflow/docker_compose_deploy/loonflow_shutongflow/shutongflow/shutongflow_demo_init.sql'
    sql_to_docker_mysql(root_pwd, db_name, sql_file)
def add_entry(self, it):
    mod, _ = self.view.get_selection().get_selected()
    mod = self.store
    id = mod.get_value(it, 0)
    utils.cmd('insert into test values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
              id,
              self.vars[0].get_text(), self.vars[1].get_text(),
              self.vars[2].get_text(), self.vars[3].get_text(),
              self.vars[4].get_text(), self.vars[5].get_text(),
              self.vars[6].get_text(), self.vars[7].get_text(),
              self.vars[8].get_text(), self.vars[9].get_text(),
              self.vars[10].get_text(), self.vars[11].get_text(),
              self.vars[12].get_text(), self.vars[13].get_text(),
              self.vars[14].get_text(), self.vars[15].get_text())
    self.update_status(dbwindow.NotifyType.ADD, _("Row aggiunta (ID: %d)") % id)
    self._check_iterator(mod, it)
def squash(args, repo_name):
    result = utils.cmd('git rev-parse --short HEAD')
    last_commit = result.stdout.rstrip()
    try:
        utils.cmd(f'git commit -a -m "squash! {last_commit}"')
    except subprocess.CalledProcessError as e:
        print(e.stdout)
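# The "squash! <commit>" subject line is git's autosquash convention; rebase
# matches the text after the prefix against commit titles and also against
# commit SHAs, so the rebase() helper above (`git rebase -i --autosquash`)
# will automatically mark this commit to be squashed into the referenced one.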
def force_push(args, repo_name):
    origin_url = get_remote_url(ORIGIN_NAME, 'push')
    branch_name = get_current_branch()
    if utils.query_yes_no(f'force push to {origin_url}?'):
        utils.cmd(f'git push {ORIGIN_NAME} -f {branch_name}')
    else:
        _exit('force push canceled')
async def get_log(ctx, logfile):
    c = 'ssh %s@%s ls HomeAlone/code/logs/web/' % (HOSTN, SERVE)
    if logfile in utils.cmd(c, False):
        await ctx.send('Found %s. Downloading it now.' % logfile)
        c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/HomeAlone/code/logs/web/ <<< $'get {logfile}'"
        utils.arr2str(utils.cmd(c, False))
        await ctx.send('Finished downloading **[%d bytes]**'
                       % os.path.getsize(os.getcwd() + '/%s' % logfile))
def link_sys(self, link_cmd):
    for module in self.kb.modules(include_bin=False):
        print('[module %s@%s]' % (module.yotta_name(), module.path))
        utils.cmd('kubos', link_cmd, cwd=module.path)
    for target in self.kb.targets():
        print('[target %s@%s]' % (target.yotta_name(), target.path))
        utils.cmd('kubos', link_cmd + '-target', cwd=target.path)
def is_rebase_active():
    try:
        utils.cmd('git rebase --show-current-patch')
    except subprocess.CalledProcessError as e:
        error = e.stderr.rstrip()
        if error != 'fatal: No rebase in progress?':
            raise e
        return False
    return True
def link_app(self, app_dir, link_cmd):
    print('[app %s]' % app_dir)
    for module in self.kb.modules(include_bin=False):
        utils.cmd('kubos', link_cmd, module.yotta_name(), cwd=app_dir)
    for target in self.kb.targets():
        utils.cmd('kubos', link_cmd + '-target', target.yotta_name(), cwd=app_dir)
def after_refresh(self, it):
    mod, it = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    date = self.vars[0].get_text()
    giorni = self.vars[1].get_text()
    utils.cmd("update filtro set date='%(date)s', giorni='%(giorni)s' "
              "where id=%(id)s" % vars())
    self.update_status(dbwindow.NotifyType.SAVE, _("Row aggiornata (ID: %d)") % id)
def _collect():
    """
    As of Feb. 2018, 1600 * 100 images account for 10% of the total
    public images on Docker Hub.
    """
    end_page, page_size = 2000, 100
    counter, gap = 0, 8
    for start_page in range(1400, end_page, page_size):
        counter += 1
        if counter % gap == 0:
            time.sleep(3600)  # back off for an hour every `gap` batches
        cmd("python3 collect.py {} &".format(start_page))
def add_entry(self, it):
    mod, _ = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    utils.cmd('insert into filtro values (?,?,?)', id,
              self.vars[0].get_text(), self.vars[1].get_text())
    self.update_status(dbwindow.NotifyType.ADD, _("Row aggiunta (ID: %d)") % id)
def changelog_part(commitish_to: str, commitish_from: str, version: str):
    date = utils.cmd(["git", "log", "-1", "--format=%ci", commitish_to])
    commit_range = (
        f"{commitish_from}..HEAD"
        if commitish_to == "HEAD"
        else f"{commitish_from}..{commitish_to}~"
    )
    commits = utils.cmd(
        ["git", "log", "--no-merges", "--date-order", "--format=%H%x09%s", commit_range]
    )
    if commits == "":
        return ""
    messages = {}
    for commit in commits.split("\n"):
        sha, msg = commit.split(maxsplit=1)
        shortsha = utils.cmd(["git", "log", "-1", "--format=%h", sha])
        try:
            data = utils.conventional_commit_parse(msg)
            messages.setdefault(data["type"], []).append(
                {**data, "sha": sha, "shortsha": shortsha}
            )
        except Exception:
            # Not a conventional commit; skip it.
            pass
    content = [
        f"## [{version}]({GITHUB_COMPARE_URL}/{commitish_from}...{version}) ({date})"
    ]
    for group in GROUP_TITLES.keys():
        if group not in messages:
            continue
        content.append(f"\n### {GROUP_TITLES[group]}\n")
        for data in messages[group]:
            prefix = (
                f' - **{data["scope"]}**: ' if data["scope"] is not None else " - "
            )
            postfix = f' ([{data["shortsha"]}]({GITHUB_COMMIT_URL}/{data["sha"]}))'
            if data["breaking"]:
                content.append(f'{prefix}**BREAKING** {data["description"]}{postfix}')
            else:
                content.append(f'{prefix} {data["description"]}{postfix}')
    return "\n".join(content)
def appium_start(host, port):
    if port_utils.check_port(port):
        port_utils.release_port(port)
    utils.cmd("/usr/local/bin/appium --version")
    cmd = ('/usr/local/bin/appium --allow-insecure chromedriver_autodownload -a '
           + host + ' -p ' + str(port) + ' -g ./log/appium_server_' + str(port) + '.log')
    print('%s at %s' % (cmd, ctime()))
    utils.cmd(cmd)
def after_refresh(self, it):
    mod, it = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    date = self.vars[0].get_text()
    nome = self.vars[1].get_text()
    quantita = self.vars[2].get_text()
    giorni = self.vars[3].get_text()
    utils.cmd("update fertilizzante set date='%(date)s', nome='%(nome)s', "
              "quantita='%(quantita)s', giorni='%(giorni)s' "
              "where id=%(id)s" % vars())
    self.update_status(dbwindow.NotifyType.SAVE, _("Row aggiornata (ID: %d)") % id)
async def delete_log(ctx, logfile):
    await ctx.send('*Getting list of log files*')
    c = 'ssh %s@%s ls HomeAlone/code/logs/web/' % (HOSTN, SERVE)
    if logfile in utils.cmd(c, False):
        await ctx.send('Cool cool cool. **%s** is gone <a:boom:818243760055779359>' % logfile)
        c2 = 'ssh %s@%s rm HomeAlone/code/logs/web/%s' % (HOSTN, SERVE, logfile)
        utils.cmd(c2, False)
    else:
        await ctx.send("Can't find %s" % logfile)
def change_mysql_psw(new_psw):
    """
    :param new_psw: the new password
    """
    key_str = 'password is generated for root@localhost: '
    with open('/var/log/mysqld.log') as f:
        for each_line in f.readlines():
            if key_str in each_line:
                init_psw = each_line.split(key_str)[-1][:-1]
                cmd("mysqladmin -uroot -p'{}' password '{}'".format(init_psw, new_psw))
                print("mysql5.7 root password changed")
                return
    print("ERROR: failed to change the mysql5.7 root password! "
          "Initial password not found.")
def handle_more_than_1k(self):
    if self.misc["handled_more_than_1k"]:
        return
    output_dir = self.sample["crab"]["outputdir"]
    without_zeros = self.sample["crab"]["outputdir"].replace("0000", "")
    for kilobatch in os.listdir(without_zeros):
        if kilobatch == "0000":
            continue
        u.cmd("mv {0}/{1}/*.root {0}/{2}/".format(without_zeros, kilobatch, "0000"))
        u.cmd("mv {0}/{1}/log/* {0}/{2}/log/".format(without_zeros, kilobatch, "0000"))
    self.do_log("copied files from .../*/ to .../0000/")
    self.misc["handled_more_than_1k"] = True
def create(self):
    if self.loaded:
        die('create: project already loaded/exists')
    if os.path.exists(self.treepath):
        die('create: project root exists on %s\n' % HOSTNAME)
    # create internal list of requested features
    for fe in self.featurelist:
        for ft in features.FEATURES:
            if ft.fid == fe[0]:
                self._featureappend(ft, fe[1:])
    # resolve feature dependencies
    for fe in self.features:
        fe.depresolve()
    # create tree
    os.mkdir(self.treepath, 0o775)
    for node in self.tree:
        os.mkdir(os.path.join(self.treepath, self.tree[node]), 0o775)
    # check credentials existence on localhost
    if self.group not in [g[0] for g in grp.getgrall()]:
        cmd = '/usr/sbin/groupadd %s' % self.group
        utils.cmd(cmd)
    if self.user not in [p[0] for p in pwd.getpwall()]:
        cmd = '/usr/sbin/useradd -d %s -g %s %s' % (self.treepath, self.group, self.user)
        utils.cmd(cmd)
    # create features
    # create() should return a config string, so keep them in a list
    felist = [fe.create() for fe in self.features]
    # create hook scripts
    hook_up = os.path.join(self.getpath('scripts').abs(), '%s_up' % HOSTNAME)
    open(hook_up, 'w').write('#!/bin/sh\n')
    os.chmod(hook_up, 0o774)
    hook_down = os.path.join(self.getpath('scripts').abs(), '%s_down' % HOSTNAME)
    open(hook_down, 'w').write('#!/bin/sh\n')
    os.chmod(hook_down, 0o774)
    # write project description
    self._writedescr()
    # write project config
    self._writeconf(felist)
    return
def branch(args, repo_name):
    if args.warn_base:
        current_branch = get_current_branch()
        if current_branch not in args.warn_base:
            message = f'Current branch {current_branch} is not in {args.warn_base}, proceed?'  # noqa:E501
            if not utils.query_yes_no(message, default='no'):
                exit('Exited without creating branch!')
    branches = list_branches(pattern=f'{args.prefix}[0-9]*')
    new_index = max([
        int(branch.split(f'{args.prefix}')[1])
        for branch in branches
    ] + [0]) + 1
    new_branch = f'{args.prefix}{new_index}'
    utils.cmd(f'git checkout -b {new_branch}')
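# Example: with args.prefix == 'dev' and existing branches ['dev1', 'dev7'],
# new_index is max(1, 7, 0) + 1 == 8, so the new branch is 'dev8'; the [0]
# seed handles the case where no numbered branches exist yet.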
def copy_files(self):
    self.do_log("started copying files to %s" % self.sample["finaldir"])
    if self.fake_copy:
        print("Will do: mv %s/merged/* to %s/"
              % (self.sample["crab"]["outputdir"], self.sample["finaldir"]))
    else:
        u.cmd("mkdir -p %s/" % self.sample["finaldir"])
        u.cmd("mv %s/merged/* %s/"
              % (self.sample["crab"]["outputdir"], self.sample["finaldir"]))
    self.do_log("finished copying files")
    # if finaldir doesn't have nevents_merged, it must have been a mv error,
    # so redo the merging and mv again
    if self.get_events_in_chain(self.sample["finaldir"] + "/*.root") == self.sample['nevents_merged']:
        self.sample["status"] = "done"
    else:
        self.do_log("lost some events after moving into final directory. re-merging now.")
        self.submit_merge_jobs()
def test(self, module_name="", target_name=""):
    module = next(
        (m for m in self.kb.modules() if m.yotta_name() == module_name), None)
    target = next(
        (t for t in self.kb.targets() if t.yotta_name() == target_name), None)
    if module and target:
        if 'testTargets' not in module.yotta_data:
            print("Please define testTargets for %s" % (module.yotta_name()))
            return 1
        if target.yotta_name() not in module.yotta_data['testTargets']:
            return 0
        print('Testing %s' % module.yotta_name())
        print('Building [module %s@%s] for [target %s] - '
              % (module.yotta_name(), module.path, target_name), end="")
        utils.cmd('kubos', 'link', '--all', cwd=module.path, echo=False,
                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        utils.cmd('kubos', 'target', target_name, cwd=module.path, echo=False,
                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        utils.cmd('kubos', 'clean', cwd=module.path, echo=False,
                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        utils.cmd('kubos', 'build', cwd=module.path, echo=False,
                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        ret = utils.cmd('kubos', 'test', cwd=module.path, echo=False)
        print('Result %d' % ret)
        return ret
    else:
        if module is None:
            print("Module %s was not found" % module_name)
        if target is None:
            print("Target %s was not found" % target_name)
        return 1
def scale_pdf_file(inputfile, outputfile):
    """
    Scale a PDF file using the command line. There are different approaches
    to this and gs, imagemagick and pdftk (among others) can be used.
    Returns True for success or False for failure.
    """
    KNOWN_ERRORS = [
        # GS produces this with out-of-date libpoppler and Microsoft Print PDF
        "Can't find CMap Identity-UTF16-H building a CIDDecoding resource."
    ]
    code, output = utils.cmd(SCALE_PDF_CMD % {"output": outputfile, "input": inputfile})
    for e in KNOWN_ERRORS:
        # Any known error in the output should return failure
        if output.find(e) != -1:
            al.error("Abandon PDF scaling - found known error: %s" % e, "media.scale_pdf_file")
            return False
    # A nonzero exit code is a failure
    if code > 0:
        al.error("Abandon PDF scaling - nonzero exit code (%s)" % code, "media.scale_pdf_file")
        return False
    return True
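# SCALE_PDF_CMD is defined elsewhere in this module; a plausible
# ghostscript-based definition (an assumption for illustration, not the
# project's actual command) matching the %(input)s / %(output)s placeholders:
#
#   SCALE_PDF_CMD = ("gs -q -dNOPAUSE -dBATCH -sDEVICE=pdfwrite "
#                    "-dPDFSETTINGS=/ebook -sOutputFile=%(output)s %(input)s")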
def after_refresh(self, it):
    mod, it = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    date = self.vars[0].get_text()
    vasca = self.vars[1].get_text()
    tipologia = self.vars[2].get_text()
    quantita = self.vars[3].get_text()
    nome = self.vars[4].get_text()
    soldi = self.vars[5].get_text()
    img = self.vars[6].get_text()
    utils.cmd("update spese set date='%(date)s', vasca='%(vasca)s', "
              "tipologia='%(tipologia)s', quantita='%(quantita)s', "
              "nome='%(nome)s', soldi='%(soldi)s', img='%(img)s' "
              "where id = %(id)s" % vars())
    self.update_status(dbwindow.NotifyType.SAVE, _("Row aggiornata (ID: %d)") % id)
def main():
    if os.geteuid() != 0:
        raise Exception("Please run with root privileges")
    # print("switching to the Aliyun yum mirror: starting")
    # use_aliyun_yum()
    # print("switching to the Aliyun yum mirror: done")
    print("installing python3: starting")
    cmd("yum -y install python3")
    print("installing python3: done")
    print("installing docker-compose: starting")
    cmd("pip3 install docker-compose -i https://pypi.doubanio.com/simple/ "
        "--trusted-host pypi.doubanio.com")
    print("installing docker-compose: done")
def list_branches(pattern=''):
    output = utils.cmd(
        f'git branch --no-color --list {pattern} --format="%(refname:short)"')  # noqa:E501
    return [x for x in output.stdout.split('\n') if x != '']
def query_file(self, csock, caddr, api_req):
    api_req = api_req.split(' :::: ')[1]
    if len(api_req.split('?')) > 1:
        file_name = api_req.split('?')[0]
        req = api_req.split('?')[1]
        # make sure the file exists
        try:
            if req == 'SIZE':
                sz = os.path.getsize(os.getcwd() + '/PoolData/Shares/%s' % file_name)
                reply = '%s is %d bytes, and was last modified %s'
                csock.send(reply % (file_name, sz, utils.cmd(
                    'date -r ' + os.getcwd() + '/PoolData/Shares/%s' % file_name,
                    False).pop()))
            if req == 'DATA':
                csock.send(open(os.getcwd() + '/PoolData/Shares/%s' % file_name, 'r').read())
        except OSError:
            csock.send('Sorry, something went wrong handling <%s>' % api_req)
    return csock
def ping_server(server):
    ping = utils.cmd('ping -c 1 %s' % server, False)
    if 'bytes' in ping[1].split(' '):
        delay = float(ping[1].split(' ')[-2].split('=')[1])
        print('[*] Can reach Server [%fms ping]' % delay)
    else:
        delay = -1
    return delay
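# A more defensive parse (a sketch, not the original helper): extract
# "time=12.3 ms" from the reply line with a regex instead of fixed word
# positions, since field order varies between ping implementations.
import re

def parse_ping_delay(line):
    m = re.search(r'time=([\d.]+)\s*ms', line)
    return float(m.group(1)) if m else -1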
async def list_log_files(ctx):
    await ctx.send('*Getting list of log files*')
    c = 'ssh %s@%s ls -la HomeAlone/code/logs/web/' % (HOSTN, SERVE)
    result = '```' + utils.arr2str(utils.cmd(c, False)) + '```'
    try:
        await ctx.send(result)
    except Exception:
        # the message may exceed Discord's length limit; fall back to stdout
        print(result)
async def check_alarm(ctx, filename, n):
    print('[-] Checking Alarm File')
    while ARMED:
        await asyncio.sleep(35)
        try:
            c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/TripWire/tripwire/.alerts/alarm/ <<< $'get alarm'"
            utils.arr2str(utils.cmd(c, False))
            if filename in utils.swap(filename, True):
                n = N
                m = '{0.author.mention} **New Connection <a:siren:833794872204722248> **'.format(ctx.message)
                m += '```' + utils.arr2str(utils.cmd(f"tail -n 3 {filename}", False)) + '```'
                await ctx.send(m)
        except IndexError:
            print('[!] Unable to read log file')
def svnimport(self):
    if not self.loaded:
        die('%s not loaded (it does not exist?)' % self.name)
    rc = utils.cmd('svn import -m \'%s initial import\' %s %s/%s'
                   % (self.name, self.treepath, SUBVERSION_PATH, self.name),
                   system=True)
    if rc[0] != 0:
        die('svn error')
    rc = utils.cmd('svn checkout --force %s/%s %s'
                   % (SUBVERSION_PATH, self.name, self.treepath), system=True)
    if rc[0] != 0:
        die('svn error')
    rc = utils.cmd('cd %s; svn propset svn:ignore * logs;'
                   'svn commit -m \'ignore logs\'' % self.treepath)
def _openscad(self, scad_file_path, options, output_file, part):
    'Execute the openscad command.'
    command = (self.openscad_path + ' ' + scad_file_path + ' -o '
               + output_file + ' ' + options)
    process = utils.cmd(command, self.verbose_lvl)
    self.process_list.append((process, part))
    if len(self.process_list) >= self.nb_job_slots:
        process.wait()
def run_tests(master_repo_url, pull_request_repo_url, pull_request_branch,
              master_repo_path, test_command, interpreter, python3):
    """
    This is a test runner function.

    It doesn't access any global variables. It assumes that the master
    repository is checked out at 'master_repo_path' (for example somewhere
    in the /tmp directory). It does the following:

    1) fetches the 'pull_request_branch' from 'pull_request_repo_url' and
       tries to apply it
    2) runs tests using 'test_command' and the Python 'interpreter'
    3) saves the report and logs into the out/ directory
    4) returns a status, which is one of the following strings:

        fetch     ... fetch failed (no tests run)
        conflicts ... there were merge conflicts (no tests run)
        Failed    ... tests run, but failed
        Passed    ... tests run, passed
    """
    print("Running tests with the following setup:")
    print("master_repo_url =", master_repo_url)
    print("pull_request_repo_url =", pull_request_repo_url)
    print("pull_request_branch =", pull_request_branch)
    print("master_repo_path =", master_repo_path)
    print("test_command =", test_command)
    print("interpreter =", interpreter)
    try:
        cmd("cd %s; git fetch %s %s:test"
            % (master_repo_path, pull_request_repo_url, pull_request_branch),
            echo=True)
    except CmdException:
        return {"result": "fetch", "log": ""}
    cmd("cd %s; git checkout test" % master_repo_path, echo=True)
    try:
        cmd("cd %s; git merge master" % master_repo_path, echo=True)
    except CmdException:
        return {"result": "conflicts", "log": ""}
    if python3:
        cmd("cd %s; bin/use2to3" % master_repo_path)
        master_repo_path = master_repo_path + "/py3k-sympy"
    log, r = cmd2("cd %s; %s %s" % (master_repo_path, interpreter, test_command))
    print("Return code:", r)
    if r == 0:
        result = "Passed"
    else:
        result = "Failed"
    return {"result": result, "log": log, "return_code": r}
def symlink(link, target, overwrite=False):
    target = os.path.abspath(target)
    link = os.path.abspath(link)
    if os.path.lexists(link):
        if overwrite:
            os.remove(link)
        else:
            return
    if OS == "windows":
        cmd = "mklink {} {} {}".format(
            "/d" if os.path.isdir(target) else "",
            shell_escape(link),
            shell_escape(target),
        )
    if OS == "linux":
        cmd = "ln -s {} {}".format(
            shell_escape(target),
            shell_escape(link),
        )
    utils.cmd(cmd, silent=True)
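# Usage sketch, e.g. symlink("build/current", "build/v1.2", overwrite=True).
# Note that mklink is a cmd.exe builtin rather than a standalone executable,
# so on Windows utils.cmd would need to run the command through the shell,
# and creating symlinks may additionally require administrator rights or
# Developer Mode; this is an assumption about utils.cmd's behavior, not
# something the helper itself enforces.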
async def set_alarm(ctx, filename):
    global ARMED
    try:
        PATH = f'/home/{HOSTN}/HomeAlone/code/logs/web/{filename}'
        c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/HomeAlone/code/logs/web/ <<< $'get {filename}'\n"
        c += f"ssh {HOSTN}@{SERVE} echo '{PATH}' >> /home/{HOSTN}/HomeAlone/code/filelist.txt\n"
        utils.arr2str(utils.cmd(c, False))
        n = int(utils.cmd("cat %s | grep 'Connection at ' | wc -l" % filename,
                          False).pop())
        await ctx.send(
            ':ok_hand: *Setting Alarm on %s*, which currently has **%d** entries.'
            % (filename, n))
        ARMED = True
        bot.loop.create_task(check_alarm(ctx, filename, n))
    except Exception:
        c = 'ssh %s@%s ls -la HomeAlone/code/logs/web/' % (HOSTN, SERVE)
        result = 'Something went wrong... Select one of these to set alarm on:\n'
        result += '```' + utils.arr2str(utils.cmd(c, False)) + '```'
        await ctx.send(result)
def after_refresh(self, it):
    mod, it = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    text = self.vars[0].get_text()
    date = self.vars[1].get_text()
    name = self.vars[2].get_text()
    litri = self.vars[3].get_text()
    tacq = self.vars[4].get_text()
    tflt = self.vars[5].get_text()
    ico2 = self.vars[6].get_text()
    illu = self.vars[7].get_text()
    reat = self.vars[8].get_text()
    schiu = self.vars[9].get_text()
    risca = self.vars[10].get_text()
    img = self.vars[11].get_text()
    utils.cmd("update vasca set vasca='%(text)s', date='%(date)s', nome='%(name)s', "
              "litri='%(litri)s', tipo='%(tacq)s', filtro='%(tflt)s', co='%(ico2)s', "
              "illuminazione='%(illu)s', reattore='%(reat)s', schiumatoio='%(schiu)s', "
              "riscaldamento='%(risca)s', img='%(img)s' where id = %(id)s" % vars())
    self.update_status(dbwindow.NotifyType.SAVE, _("Row aggiornata (ID: %d)") % id)
async def show_connection(ctx):
    msg = 'Aw Geez, lets see who is connected <:morty:833787213766066176>'
    await ctx.send(msg)
    c = 'ssh %s@%s netstat -antup' % (HOSTN, SERVE)
    result = '```' + utils.arr2str(utils.cmd(c, False)) + '```'
    try:
        await ctx.send(result)
    except Exception:
        # message too long for Discord; send only the part after the header
        print(result)
        await ctx.send('```' + ''.join(result.split('established)\n')[1:]))
def handle_sample_problems(self):
    problems = self.sample["checks"]["problems"]
    merged_dir = self.sample["crab"]["outputdir"] + "/merged/"
    for problem in problems:
        if "Wrong event count" in problem:
            # delete this imerged
            if not self.do_skip_tail:
                imerged = int(problem.split(".root")[0].split("_")[-1])
                u.cmd("rm %s/merged_ntuple_%i.root" % (merged_dir, imerged))
                self.submit_merge_jobs()
            else:
                # FIXME: be smart about event counts? or is there no way to get
                # event counts until crab has finished? but that defeats the
                # purpose of do_skip_tail
                pass
        elif "events with zeros in" in problem:
            # delete all merged files and remerge
            u.cmd("rm %s/merged_ntuple_*.root" % (merged_dir))
            self.submit_merge_jobs()
        elif "DAS query failed" in problem:
            # probably transient; ignore and let check() try again later
            pass
def setupConf(master, slaves, port, javahome, localDirString):
    # copy templates to conf
    utils.cmd("rm -rf conf; cp -r hadoop1-template conf")
    # set up conf with the details
    utils.cmd("cd conf; sh mr1-setup-conf.sh " + master + " " + port + " "
              + javahome + " " + localDirString + "; cd ..")
    writeSlavesFile(slaves)
    utils.cmd("cp slaves conf/slaves")
def build(self, module_name="", target_name=""):
    module = next(
        (m for m in self.kb.modules() if m.yotta_name() == module_name), None)
    target = next(
        (t for t in self.kb.targets() if t.yotta_name() == target_name), None)
    if module and target:
        print('Building [module %s@%s] for [target %s] - '
              % (module.yotta_name(), module.path, target_name), end="")
        utils.cmd('kubos', 'target', target_name, cwd=module.path, echo=False,
                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        utils.cmd('kubos', 'clean', cwd=module.path, echo=False,
                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        ret = utils.cmd('yt', 'build', cwd=module.path, echo=False,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        print('Result %d' % ret)
        return ret
    else:
        if module is None:
            print("Module %s was not found" % module_name)
        if target is None:
            print("Target %s was not found" % target_name)
        return 1
async def read_log(ctx, filename):
    c = f'ssh {HOSTN}@{SERVE} ls HomeAlone/code/logs/web/'
    if filename in utils.cmd(c, False):
        await ctx.send('**This will take a minute...**')
        await ctx.send('<a:rickspin:834261749846507520>')
        c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/HomeAlone/code/logs/web/ <<< $'get {filename}'"
        utils.arr2str(utils.cmd(c, False))
        with open(filename, 'r') as f:
            while True:
                piece = f.read(1024)
                if not piece:
                    break
                try:
                    await ctx.send('```\n' + piece.replace(SERVE, '<removed>') + '\n```')
                    await asyncio.sleep(3)  # avoid hammering the Discord rate limit
                except Exception:
                    print(piece)
        os.remove(filename)
    else:
        await ctx.send("I can't find %s" % filename)
def add_entry(self, it):
    mod, _ = self.view.get_selection().get_selected()
    id = mod.get_value(it, 0)
    for i in self.vars:
        print(i.get_text())
    utils.cmd('insert into vasca values(?,?,?,?,?,?,?,?,?,?,?,?,?)',
              id,
              self.vars[0].get_text(), self.vars[1].get_text(),
              self.vars[2].get_text(), self.vars[3].get_text(),
              self.vars[4].get_text(), self.vars[5].get_text(),
              self.vars[6].get_text(), self.vars[7].get_text(),
              self.vars[8].get_text(), self.vars[9].get_text(),
              self.vars[10].get_text(), self.vars[11].get_text())
    self.update_status(dbwindow.NotifyType.ADD, _("Row aggiunta (ID: %d)") % id)
def setup_env():
    env_data = ''
    myname = utils.cmd('hostname', False).pop()
    extip = utils.get_ext_ip()
    intip = utils.get_internal_addr()
    env_data += 'HOSTNAME=%s\n' % myname
    env_data += 'EXTERNAL_IP=%s\n' % extip
    if len(intip) == 1:
        env_data += 'INTERNAL_IP1=%s\n' % intip[0]
    else:
        ip = 1
        for addr in intip:
            env_data += 'INTERNAL_IP%d=%s\n' % (ip, addr)
            ip += 1
    env_notice()
    open(os.getcwd() + '/.env', 'w').write(env_data)
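# Resulting .env shape (illustrative values only):
#
#   HOSTNAME=node01
#   EXTERNAL_IP=203.0.113.7
#   INTERNAL_IP1=192.168.1.10
#   INTERNAL_IP2=10.0.0.5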
def find_missing_nodes():
    # Look at the LAN for what's connected, compare to known MACs,
    # and find the new IPs if they match
    nodes = []
    disconnected = []
    for n in get_node_names():
        nodes.append(n.split('/')[-1].split('.')[0])
    nodes = list(set(nodes))  # remove any duplicates if they exist
    node_table = {}
    old_ref = {}
    # Check which of these appear offline (because the IP changed)
    for n in nodes:
        h, i, p, m = utils.load_credentials(n, False)
        old_ref[m] = [h, i, m]
        check = False
        try:
            check = check_connected(h, i, p)
        except IndexError:
            pass
        if not check:
            node_table[m] = ''
            disconnected.append(m)
        else:
            node_table[m] = [h, i]
    # fill node_table with new IPs for those MACs if they can be found
    node_ref = {}
    corrected = {}
    print('[*] %d Disconnected Nodes' % len(disconnected))
    if len(disconnected):
        test_scan = utils.cmd('sudo nmap -T5 -Pn 192.168.1.0/24 -p 22', False)
        current_host = ''
        for line in test_scan:
            if len(line.split(' scan report for ')) > 1:
                current_host = line.split(' report for ')[1]
                node_ref[current_host] = ''
            if len(line.split('MAC Address: ')) > 1:
                mac = line.split('MAC Address: ')[1].split(' ')[0]
                # node_ref[mac] = current_host
                if mac in node_table.keys() and mac in disconnected:
                    h, oi, m = old_ref[mac]
                    corrected[mac] = [h, current_host]
                else:
                    corrected[mac] = old_ref[mac]
    return corrected
def get_remotes():
    remotes = {}
    result = utils.cmd('git remote -v')
    for line in result.stdout.rstrip().split('\n'):
        parts = re.split('\t| ', line)
        remote_name = parts[0]
        remote_url = parts[1]
        remote_action = parts[2].replace('(', '').replace(')', '')
        if remote_name not in remotes:
            remotes[remote_name] = {}
        remotes[remote_name][remote_action] = {
            'url': remote_url,
            'account': re.split(':|/', remote_url)[1],
        }
    return remotes
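# For reference, `git remote -v` output has the tab/space-separated shape
#
#   origin  git@github.com:user/repo.git (fetch)
#   origin  git@github.com:user/repo.git (push)
#
# so re.split(':|/', url)[1] yields the account ("user") for SSH-style URLs;
# HTTPS URLs would need different indexing, which this helper does not handle.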
def svncheckout(cls, name):
    if Project(name, nocreate=True).loaded:
        print('project %s exists' % name)
        return False
    rc = utils.cmd('svn checkout %s/%s %s'
                   % (SUBVERSION_PATH, name, os.path.join(PROJECTROOT, name)),
                   system=True)
    if rc[0] != 0:
        die('svn error')
    nproj = Project(name, nocreate=True)
    if not nproj.loaded:
        print('project %s does not seem like a valid one' % name)
        return False
    assert nproj.svn
    return nproj
def make_metadata(self):
    metadata_file = self.sample["crab"]["taskdir"] + "/metadata.txt"
    metadata_file_json = metadata_file.replace(".txt", ".json")
    with open(metadata_file, "w") as fhout:
        print("sampleName: %s" % self.sample["dataset"], file=fhout)
        print("xsec: %s" % self.sample["xsec"], file=fhout)
        print("k-fact: %s" % self.sample["kfact"], file=fhout)
        print("e-fact: %s" % self.sample["efact"], file=fhout)
        print("cms3tag: %s" % self.sample["cms3tag"], file=fhout)
        print("gtag: %s" % self.sample["gtag"], file=fhout)
        print("sparms: %s" % (",".join(self.sample["sparms"]) if self.sample["sparms"] else "_"), file=fhout)
        print("", file=fhout)
        print("unmerged files are in: %s" % self.sample["crab"]["outputdir"], file=fhout)
        print("", file=fhout)
        for ijob in sorted(self.sample["ijob_to_miniaod"]):
            print("unmerged %i %s" % (ijob, ",".join(self.sample["ijob_to_miniaod"][ijob])), file=fhout)
        print("", file=fhout)
        for imerged in sorted(self.sample["imerged_to_ijob"]):
            print("merged file constituents %i: %s" % (imerged, " ".join(map(str, self.sample["imerged_to_ijob"][imerged]))), file=fhout)
        print("", file=fhout)
        for imerged in sorted(self.sample["imerged_to_ijob"]):
            nevents_both = [self.sample["ijob_to_nevents"][ijob] for ijob in self.sample["imerged_to_ijob"][imerged]]
            nevents = sum([x[0] for x in nevents_both])
            nevents_effective = sum([x[1] for x in nevents_both])
            print("merged file nevents %i: %i %i" % (imerged, nevents, nevents_effective), file=fhout)
    d_tot = self.sample.copy()
    with open(metadata_file_json, "w") as fhout:
        json.dump(d_tot, fhout, sort_keys=True, indent=4)
    # mirror the central snt directory structure for metadata files
    metadatabank_dir = "/nfs-7/userdata/metadataBank/%s/%s/%s/" \
        % (self.sample["specialdir"], self.sample["shortname"],
           self.sample["cms3tag"].split("_", 1)[1])
    # copy txt to merged and backup; copy json to backup only
    u.cmd('chmod a+w %s %s' % (metadata_file, metadata_file_json))
    u.cmd("cp %s %s/" % (metadata_file, self.sample["crab"]["outputdir"] + "/merged/"))
    u.cmd('mkdir -p {0} ; chmod a+w {0}'.format(metadatabank_dir))
    u.cmd('cp %s %s %s/' % (metadata_file, metadata_file_json, metadatabank_dir))
    self.do_log("made metadata and copied it to merged and backup areas")
def decrement_id(self, id):
    utils.cmd("update filtro set id=%d where id=%d" % (id - 1, id))
def down(self):
    if 'hook_down' in self.args:
        script = Path(self, self.args['hook_down'][0])
        utils.cmd('sudo -u %s -H %s' % (self.user, script.abs()), system=True)
def remove_id(self, id):
    utils.cmd('delete from invertebrati where id=%d' % id)
    self.update_status(dbwindow.NotifyType.DEL, _("Row rimossa (ID: %d)") % id)
def make_pictures(self):
    'Generates pictures for the documentation.'
    import tempfile
    print("\n*** Creating pictures ***")
    img_opt = "--imgsize=" + str(IMG_SIZE * 2) + "," + str(IMG_SIZE * 2)
    img_dir = op.join(self.doc_dir, 'parts_img')
    os.makedirs(img_dir)
    cube_path = op.join(tempfile.gettempdir(), self.root.get('id') + '.scad')
    with open(cube_path, 'w') as scad_file:
        scad_file.write('import("' + op.join(op.dirname(self.doc_dir), 'original.stl') + '");')
    # TODO: fix the image option for the model
    # img_opt = img_opt + (" --camera=" + self.root.get('img')
    #                      if self.root.get('img') != 'yes' else '')
    command = 'openscad ' + cube_path + ' -o ' + op.join(img_dir, self.root.get('id') + '.png')
    os.system(command)  # does not work with popen
    os.remove(cube_path)
    # TODO: add recursion + if there is no img in the current node, look in the family
    # for every family that needs an image:
    for family in (family for family in self.root if 'img' in family.attrib):
        part_scad_path = op.join(self.scad_dir, family.get('file'))
        tmp_dxf = op.join(tempfile.gettempdir(), 'tmp_dxf')
        export_dir = img_dir if family.get('type') == 'stl' else tmp_dxf
        if family.get('type') == 'dxf':
            utils.create_dir(tmp_dxf, self.verbose_lvl)
        family_img = family.get('img')
        part_img_opt = img_opt + (" --camera=" + family_img if family_img != 'yes' else '')
        param = self.root.get('data')
        process.Process(part_scad_path, family, param, export_dir, self.jobs,
                        self.openscad_path, self.verbose_lvl, part_img_opt)
        if family.get('type') == 'dxf':
            parts_list = list()
            for part in family:
                dxf_file = op.join(tmp_dxf, part.get('id') + '.dxf')
                scad_file = op.join(self.scad_dir, 'dxf2stl.scad')
                thickn = self.root.get('thickness') if 'thickness' in self.root.attrib else '1'
                command = ('openscad ' + scad_file + ' -D \'file="' + dxf_file
                           + '"; thickness=' + thickn + "'"
                           + ' -o ' + op.join(img_dir, part.get('id') + '.png'))
                parts_list.append(utils.cmd(command, self.verbose_lvl))
            while parts_list:
                for i, part in enumerate(parts_list):
                    if part.poll() == 0:
                        del parts_list[i]
            shutil.rmtree(tmp_dxf)
    dimensions = str(IMG_SIZE) + 'x' + str(IMG_SIZE)
    command = ('mogrify -trim +repage -resize ' + dimensions +
               ' -background "#FFFFE5" -gravity center -extent ' + dimensions +
               ' -fuzz 15% -transparent "#FFFFE5" ' + op.join(img_dir, '*.png'))
    # shadow_cmd = 'for f in ' + op.join(img_dir, '*.png') + '; do convert $f -trim ' + \
    #     '\( +clone -background black -shadow 80x5+5+5 \) +swap -background none ' + \
    #     '-layers merge $f; done'
    utils.cmd(command, self.verbose_lvl)
def decrement_id(self, id):
    utils.cmd("update piante set id=%d where id=%d" % (id - 1, id))
def decrement_id(self, id):
    utils.cmd("update invertebrati set id=%d where id=%d" % (id - 1, id))
def touch(self):
    utils.cmd('chown -R %s:%s %s' % (self.user, self.group, self.treepath))
    utils.cmd('chmod -R g+rw %s' % self.treepath)
    return
from glob import glob

nbad = 0
for fname in glob(in_dir + "/new_xsec/*.root"):
    isbad, nentries, prob = check_merged_rootfile(fname, nevents)
    if isbad:
        nbad += 1
        print("%s IS BAD: %s" % (fname, isbad))
        print("Reason: %s" % (prob))
if nbad == 0:
    new_nevents = get_events_in_chain(in_dir + "/new_xsec/*.root")
    if new_nevents == nevents:
        print("rm -f {0}/*.root ; mv {0}/new_xsec/* {0}/".format(in_dir))
        metadata["xsec"] = xsec
        metadata["kfact"] = kfact
        metadata["efact"] = efact
        with open("tmp.json", "w") as fhout:
            json.dump(metadata, fhout, sort_keys=True, indent=4)
        u.cmd("cp tmp.json %s" % (in_dir + "/new_xsec/metadata.json"))
        print("made new metadata.json!")
    else:
        print("ERROR, nevents don't match (new,old) = (%i,%i)" % (new_nevents, nevents))
def decrement_id(self, id):
    utils.cmd('update test set id=%d where id=%d' % (id - 1, id))
def remove_id(self, id):
    utils.cmd('delete from fertilizzante where id=%d' % id)
    self.update_status(dbwindow.NotifyType.DEL, _("Row rimossa (ID: %d)") % id)
def decrement_id(self, id):
    utils.cmd('update fertilizzante set id=%d where id=%d' % (id - 1, id))