def test_smoke(self):
    dirpath = tempfile.mkdtemp()
    output = os.path.join(dirpath, "avg_laptimes.csv")
    process.run(output=output)
    rows = []
    with open(output) as csvfile:
        csv_reader = csv.DictReader(csvfile, ["driver", "time"])
        for row in csv_reader:
            rows.append(row)
    expected = [
        {"driver": "Alonzo", "time": "4.526666666666666"},
        {"driver": "Hamilton", "time": "4.5633333333333335"},
        {"driver": "Verstrappen", "time": "4.63"},
    ]
    assert rows == expected
    shutil.rmtree(dirpath)
def job_process(job, context):
    # The return value of pre_job decides whether this build should run;
    # if so, create the associated Build record first.
    if pre_job(job, context):
        build = Build(job=job,
                      create_time=datetime.now(),
                      success=False,
                      duration=0,
                      running=True,
                      version='' if job.vcs_type == 'none' else context['vcsVer'])
        build.save()
        context['build'] = build
        try:
            do_job(job, context)
            post_job(job, context)
        finally:
            # Unknown exceptions are not handled further here; just close out the build.
            build.running = False
            build.save()
            job_email(job, build, context['setting'], ifSuccess=False)
            # Run the user's finish_cmd
            try:
                os.environ["topo_builder_build_duration"] = str(build.duration)
                os.environ["topo_builder_build_success"] = str(build.success)
                os.environ["topo_builder_build_phrase"] = build.phrase.decode("utf-8").encode(locale.getdefaultlocale()[1])
                if job.finish_cmd:
                    process.run(job.finish_cmd,
                                log=build_dir(job, build, LOG_FILE),
                                dispTarget=True,
                                terminatable=True)
            except Exception:
                print(traceback.format_exc())
            # Run the user hook
            try:
                hook.post_job_hook(job, build, context)
            except Exception:
                print(traceback.format_exc())
def get(self, address, work_copy, user, password, ver="HEAD", log=None):
    if os.path.exists(work_copy):
        raise Exception(work_copy + " already exists")
    if "@" not in address:
        return process.run("svn checkout -r " + ver + " " + StrUtil.quote(address) +
                           " " + StrUtil.quote(work_copy) +
                           self._get_svn_para(user, password),
                           log=log)
    else:
        return process.run("svn checkout " + StrUtil.quote(address) +
                           " " + StrUtil.quote(work_copy) +
                           self._get_svn_para(user, password),
                           log=log)
def main(use_calendar=False):
    def cli_log(message):
        print(message)

    tools.Logger.set_callback(cli_log)
    if use_calendar:
        process_calendar.run()
    else:
        process.run()
def get_runs_clone(module):
    runs_path = module / RUNS_DIR
    runs_path = runs_path.absolute().resolve()
    if not runs_path.exists():
        # Initialize a fresh runs/ dir from the current state of the repo
        run(['git', 'clone', module, runs_path])
    return runs_path
def checkout_and_reset(branch, default_sha, new_tree, is_branch=True):
    """Force the working tree to match `new_tree` while the branch points to
    `default_sha`.

    After creating the branch if necessary, leaves staged/unstaged changes
    ready to be committed, simulating an in-progress cherry-pick of
    `new_tree` on top of the branch's current SHA.
    """
    current_sha = checkout(branch, default_sha, is_branch=is_branch)
    run(['git', 'reset', '-q', '--hard', new_tree])
    run(['git', 'reset', '-q', current_sha])
    return current_sha
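# Hedged usage sketch (not part of the original source): drive
# checkout_and_reset and commit the staged result. The branch name and
# tree-ish arguments below are hypothetical placeholders.
def _example_checkout_and_reset():
    # Make 'feature' point at origin/main if it does not exist yet, then stage
    # the contents of 'snapshot-branch' on top of its current tip.
    base_sha = checkout_and_reset('feature', 'origin/main', 'snapshot-branch')
    # Index and working tree now match 'snapshot-branch' while the branch
    # still points at base_sha; one commit records the whole difference.
    run(['git', 'commit', '-q', '-m', 'apply snapshot-branch tree'])
    return base_sha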
def refresh_mongodata():
    """Refresh the db when it is not up to date.

    Request method: GET
    """
    try:
        process.run()
        return "Data Updated"
    except Exception as e:
        return str(e)
def visualize():
    variable = request.form.get("variable")
    ps.run(variable)
    n = int(ps.get_id()) - 1
    webbrowser.open_new_tab(f'http://localhost:5000/static/result{n}.jpg')
    return render_template('display_type.html'), 200
def main():
    print(SECRET_PATH)

    # Make sure arcanist is configured
    args = [ARCANIST_PATH, 'set-config', 'phabricator-uri', PHABRICATOR_URI]
    process.run('php', args, ARCANIST_WORKING_DIR)  # test

    handler = MyHandler
    socketserver.TCPServer.allow_reuse_address = True
    httpd = socketserver.TCPServer(("0.0.0.0", PORT), handler)
    httpd.serve_forever()
def start(on_port=None):
    global vis
    global port
    assert vis is None, "Cannot start more than one Visdom server."
    port = alloc_port() if on_port is None else on_port
    print("Starting Visdom server on %d" % port)
    process.run("%s -m visdom.server -p %d" % (sys.executable, port))
    if not wait_for_port(port):
        print("ERROR: failed to start Visdom server. Server not responding.")
        return
    print("Done.")
    vis = visdom.Visdom(port=port)
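# Hedged usage sketch (an assumption, not from the source): start() leaves the
# module-level `vis` client set on success, so callers can log through it.
def _example_start_and_log():
    start()  # or start(on_port=8097) to pin the port
    if vis is not None:
        vis.text('Visdom server is up')  # simple smoke-test message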
def lists(runtime=True, tojson=True):
    results = []
    rets = process.run(["ezjail-admin", "list"])[1]
    for ret in rets[2:]:
        col = ret.split()
        results.append({
            "name": col[3],
            "ip": col[2],
            "running": col[1] != 'N/A',
            "host": None,
        })
    if runtime:
        for node in nodes:
            r = requests.get(nodes[node] + '/list')
            res = r.json()
            for re in res:
                if res[re]['running']:
                    res[re]['host'] = node
                    remove = None
                    for x in results:
                        if x['name'] == re:
                            x['host'] = node
                            x['running'] = True
                    # results.append(res[re])
    if tojson:
        return json.dumps(results)
    return results
def index():
    global dir
    transcriptDBs = getTranscriptDBs()
    transcriptDB_names = sorted(transcriptDBs.keys())
    if request.method == 'POST':
        input_records = request.form['inputfield'].split('\n')
        input_records = [str(x) for x in input_records]
        selected_db = request.form['selected_transcriptdb']
        transcriptdb = transcriptDBs[selected_db]
        dir = makeOutputDir()
        CSNstrings, ENSTIDs = processInput(input_records)
        if len(CSNstrings) == 0:
            return render_template('index.html', transDBs=transcriptDB_names)
        gbuild = selected_db[selected_db.find('GRC'):-1]
        ref_fn = application.config[gbuild]
        results = process.run(dir, CSNstrings, ENSTIDs, transcriptdb, ref_fn)
        return render_template('results.html', results=results)
    return render_template('index.html', transDBs=transcriptDB_names)
def job():
    if utils.is_weekday():
        logging.info(
            "*********************************************************************"
        )
        if utils.need_update_data():
            logging.info("Updating data")
            process.run()
        stocks = utils.get_stocks()
        m_filter = strategy(end_date=None)
        results = list(filter(m_filter, stocks))
        logging.info('Stock selection results: {0}'.format(results))
        logging.info(
            "*********************************************************************"
        )
def getCrontab(self, user=None, timeout=20):
    cmd = 'crontab -l'
    if user:
        cmd += ' -u %s' % user
    out, err, code = process.run(cmd, timeout)
    if code != 0:
        return None
    return out
def removeCrontab(self, user=None, timeout=20):
    cmd = 'crontab -r'
    if user:
        cmd += ' -u %s' % user
    out, err, code = process.run(cmd, timeout)
    if code != 0:
        return False
    return True
def make_run_commit(config):
    success_path = Path(SUCCESS_PATH)
    failure_path = Path(FAILURE_PATH)
    if success_path.exists():
        if failure_path.exists():
            raise Exception('Found %s and %s' % (success_path, failure_path))
        status = 'succeeded'
    elif failure_path.exists():
        status = 'failed'
    else:
        raise Exception('Found neither %s nor %s' % (success_path, failure_path))

    if MSG_PATH.exists():
        with MSG_PATH.open('r') as f:
            msg = f.read()
        MSG_PATH.unlink()
    else:
        msg = '%s: %s' % (now_str, status)

    state_paths = strs(config, 'state')
    out_paths = strs(config, 'out')
    files = [
        OUT_PATH,
        ERR_PATH,
        DOCKERFILE_PATH,
        success_path,
        failure_path,
    ] + state_paths + out_paths
    git.add(files)
    name = get_name(config)
    git.set_user_configs(name)
    # "-q" is necessary when committing files >1GB;
    # https://public-inbox.org/git/[email protected]/t/
    run(['git', 'commit', '-a', '-q', '--allow-empty', '-m', msg])
    run_sha = git.sha()
    print('Committed run: %s' % run_sha)
    return run_sha, msg
def snapshot():
    req = flask.request.json
    if 'name' not in req:
        return (json.dumps({"status": "error", "message": "name not given"}), 500)
    if re.match('^[a-zA-Z0-9_.]{3,}$', req['name']) is None:
        return (json.dumps({"status": "error", "message": "name not allowed"}), 500)
    if not lists_find(req['name']):
        return (json.dumps({"status": "error", "message": "name not found"}), 500)
    if 'snap' not in req:
        return (json.dumps({"status": "error", "message": "snap name not given"}), 500)
    if re.match('^[a-zA-Z0-9_.:-]{3,}$', req['snap']) is None:
        return (json.dumps({"status": "error", "message": "snap name not allowed"}), 500)
    if flask.request.method == 'POST':
        ret = process.run([
            'zfs', 'snap',
            'zroot/usr/jails/{}@{}'.format(req['name'], req['snap'])
        ])
    elif flask.request.method == 'DELETE':
        ret = process.run([
            'zfs', 'destroy',
            'zroot/usr/jails/{}@{}'.format(req['name'], req['snap'])
        ])
    if ret[0] != 0:
        return (json.dumps({"status": "error", "message": "zfs: " + str(ret[1])}), 500)
    return json.dumps({"status": "success"})
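# Hedged client sketch for the endpoint above. The base URL and the
# '/snapshot' route are assumptions (no route decorator is shown here); the
# JSON fields match what the handler validates.
def _example_create_snapshot(base_url='http://localhost:5000'):
    import requests
    resp = requests.post(base_url + '/snapshot',
                         json={'name': 'web01', 'snap': 'before-upgrade'})
    return resp.json()  # {"status": "success"} on success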
def add(files, *args):
    paths = []
    missing_paths = []
    for file in files:
        path = Path(file)
        if path.exists():
            paths.append(path)
        else:
            missing_paths.append(str(path))
    if missing_paths:
        if len(missing_paths) == 1:
            paths_str = ' %s' % missing_paths[0]
        else:
            paths_str = '\n\t'.join([''] + missing_paths)
        print('Skipping adding non-existent paths:%s' % paths_str)
    if paths:
        run(['git', 'add'] + list(args) + ['--'] + paths)
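# Hedged usage sketch: the wrapper skips missing paths instead of failing, so
# callers can pass an optimistic file list; extra git flags ride through *args.
def _example_add():
    add(['out.txt', 'maybe-missing.log'], '-f')  # runs: git add -f -- out.txt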
def setCrontabFile(self, crontabFile, user=None, timeout=20):
    if not crontabFile:
        return False
    cmd = 'crontab %s' % crontabFile
    if user:
        cmd += ' -u %s' % user
    out, err, code = process.run(cmd, timeout)
    if code != 0:
        Log.cout(Log.ERROR, 'Set crontab failed: %s' % err)
        return False
    return True
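# Hedged usage sketch tying the crontab helpers together (assuming they live
# on one manager class, as their signatures suggest); the user name and file
# path are placeholders.
def _example_crontab_roundtrip(mgr):
    current = mgr.getCrontab(user='deploy')               # None on failure
    if current is not None:
        mgr.setCrontabFile('/tmp/new.cron', user='deploy')
    return mgr.removeCrontab(user='deploy')               # True on success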
def my_fork():
    child_pid = os.fork()
    if child_pid == 0:
        print("child process: %s" % os.getpid())
        print("environment: %s" % os.environ["PATH"])
        r = process.run("./test.sh")
        r.wait()
        print("subprocess environment: %s" % r.stdout)
    else:
        print("parent process: %s" % os.getpid())
        print("environment: %s" % os.environ["PATH"])
def do_GET(self):
    print(self.path)
    match = re.match(r'^/([0-9a-fA-F]+)/.*$', self.path)
    if match and match.group(1) == SECRET_PATH:
        args = parse_qs(urlparse(self.path).query)
        username = base64.b64decode(args.get('username')[0])
        email = base64.b64decode(args.get('email')[0])
        realname = base64.b64decode(args.get('realname')[0])
        admin = base64.b64decode(args.get('admin')[0])
        args = [ADD_USER_PATH, username, email, realname, admin]
        print('Adding {} {}'.format(username, email))
        process.run('php', args, WORKING_DIR)
        self.send_response(200)
    return http.server.SimpleHTTPRequestHandler.do_GET(self)
def delete():
    req = flask.request.json
    if 'name' not in req:
        return (json.dumps({"status": "error", "message": "name not given"}), 500)
    if re.match('^[a-zA-Z0-9_.]{3,}$', req['name']) is None:
        return (json.dumps({"status": "error", "message": "name not allowed"}), 500)
    if not lists_find(req['name']):
        return (json.dumps({"status": "error", "message": "name not found"}), 500)

    # ezjail-admin delete
    ret = process.run(["ezjail-admin", "delete", req['name']])
    if ret[0] != 0:
        return (json.dumps({"status": "error", "message": "ezjail: " + str(ret[1])}), 500)

    # zfs destroy
    ret = process.run([
        "zfs", "destroy", "-r",
        "zroot/usr/jails/{name}".format(name=req['name'])
    ])
    if ret[0] != 0:
        return (json.dumps({"status": "error", "message": "zfs: " + str(ret[1])}), 500)

    # remove the jail's public key and directory
    os.unlink("key/{name}.pub".format(name=req['name']))
    os.rmdir("/usr/jails/{name}".format(name=req['name']))

    # regenerate the key setup
    process.run(["sh", "keygen.sh"])
    return json.dumps({"status": "success"})
def control():
    req = flask.request.json
    li = lists(False)[req['name']]
    if req['action'] == 'start':
        ret = process.run(
            ["ifconfig", "em0", "add", li['ip'], "netmask", "255.255.255.0"])
        ret = process.run(["ezjail-admin", "onestart", req['name']])
        if ret[0] != 0:
            return (json.dumps({"status": "error", "message": ret[1]}), 500)
    elif req['action'] == 'stop':
        ret = process.run(["ifconfig", "em0", "delete", li['ip']])
        ret = process.run(["ezjail-admin", "onestop", req['name']])
        if ret[0] != 0:
            return (json.dumps({"status": "error", "message": ret[1]}), 500)
    else:
        return (json.dumps({
            "status": "error",
            "message": "action not recognized; use start or stop"
        }), 500)
    return json.dumps({"status": "success"})
def lists(tojson=True):
    results = {}
    rets = process.run(["ezjail-admin", "list"])[1]
    for ret in rets[2:]:
        col = ret.split()
        results[col[3]] = {
            "name": col[3],
            "ip": col[2],
            "running": col[1] != 'N/A',
        }
    if tojson:
        return json.dumps(results)
    return results
def status():
    results = {"clusters": [], "disk": 0}
    for node in nodes:
        r = requests.get(nodes[node] + '/status')
        res = r.json()
        res['name'] = node
        results['clusters'].append(res)
    ret = process.run(["zfs", "list", "-Hp", "zroot"])
    if ret[0] != 0:
        return json.dumps({"status": "error", "message": "zfs: " + str(ret[1])})
    ret = ret[1][0].split()
    results['disk'] = round(
        float(ret[1]) / (int(ret[1]) + int(ret[2])) * 100, 2)
    return json.dumps(results)
def checkout(branch, default_sha=None, return_sha=True, is_branch=True):
    if is_branch and not success('git', 'show-branch', branch):
        if not default_sha:
            raise Exception(
                'Branch %s not found, and no default SHA provided' % branch)
        run(['git', 'branch', branch, default_sha])
        run(['git', 'checkout', branch])
        return default_sha
    run(['git', 'checkout', branch])
    if return_sha:
        return sha()
    return None
def do_job(job, context):
    build = context['build']
    # The build dir should not exist yet; raise if it does
    if os.path.exists(build_dir(job, build)):
        raise Exception('Build dir ' + build_dir(job, build) + " already exists")
    else:
        os.makedirs(build_dir(job, build))
    # Logging starts from do_job, so remove any pre-existing log first
    if os.path.isfile(build_dir(job, build, LOG_FILE)):
        os.remove(build_dir(job, build, LOG_FILE))
    # Run the user command
    try:
        # Run the user hook
        hook.pre_do_job_hook(job, build, context, job_log)
        if job.vcs_type != 'none':
            do_vcs(job, build, context, context['vcsVer'])
        # Prepare environment variables; note UTF-8 values must be re-encoded
        # to the locale encoding, while locale strings (job_dir) need no conversion
        os.environ["topo_builder_job"] = job.name.encode(locale.getdefaultlocale()[1])
        os.environ["topo_builder_job_id"] = str(job.id)
        os.environ["topo_builder_build_id"] = str(build.id)
        os.environ["topo_builder_ver"] = build.version.encode(locale.getdefaultlocale()[1])
        os.environ["topo_builder_artifact"] = job_dir(job, job.artifact)
        # job.cmd is not resolved against job_dir, since it may be e.g. a python
        # invocation; whoever creates the job must use an absolute path here, or
        # a relative path resolvable via PATH
        context['code'] = process.run(job.cmd,
                                      log=build_dir(job, build, LOG_FILE),
                                      dispTarget=True,
                                      cwd=job_dir(job, job.cmd_dir),
                                      sync=(job.type == "standard"),
                                      terminatable=True)
        build.artifact = str(job.id) + "/" + str(build.id) + "/"
        # A string in the database, but an int in memory, so users can write
        # greater-than/less-than comparisons
        build.code = str(context['code'])
        build.save()
        # Run the user hook
        hook.post_do_job_hook(job, build, context, job_log)
    except Exception:
        job_log(job, build, traceback.format_exc())
def snapshots():
    li = lists(False, False)
    result = {}
    for l in li:
        l = l['name']
        ret = process.run(
            ['zfs', 'list', '-Hprt', 'all', "zroot/usr/jails/{}".format(l)])
        ret = ret[1]
        col = ret[0].split()
        result[l] = {
            "used": int(col[1]),
            "available": int(col[1]) + int(col[2]),
            "snapshots": []
        }
        for line in ret[1:]:
            col = line.split()
            result[l]["snapshots"].append({
                "name": col[0].split('@')[1],
                "used": int(col[1])
            })
    return json.dumps(result)
def push(remote, src=None, dest=None):
    if dest is None or src == dest:
        dest = src
        refspec = dest
    else:
        if src is None and dest is not None:
            src = 'HEAD'
        refspec = '%s:%s' % (src, dest)
    cmd = ['git', 'push', remote, refspec]
    try:
        run(cmd)
    except CalledProcessError:
        print('Failed to push %s/%s; attempting a merge:' % (remote, refspec))
        run(['git', 'merge', '-X', 'ours', '--no-edit', '%s/%s' % (remote, dest)])
        print('Trying to push again:')
        run(cmd)
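# Hedged usage sketch: push HEAD to a named remote branch; on rejection the
# wrapper merges the remote branch (preferring local changes) and retries.
# Remote and branch names are placeholders.
def _example_push():
    push('origin', src='HEAD', dest='runs')  # refspec becomes 'HEAD:runs'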
def update(self, work_copy, user, password, ver="HEAD", log=None):
    if not os.path.exists(work_copy):
        raise Exception(work_copy + " does not exist")
    return process.run("svn update -r " + ver + " " + StrUtil.quote(work_copy) +
                       self._get_svn_para(user, password),
                       log=log)
def fetch(*remotes):
    for remote in remotes:
        run(['git', 'fetch', remote])
def set_user_configs(name):
    if not success('git', 'config', 'user.name'):
        run(['git', 'config', 'user.name', name])
    if not success('git', 'config', 'user.email'):
        run(['git', 'config', 'user.email', '%s@%s' % (name, name)])
def allow_pushes():
    run(['git', 'config', 'receive.denyCurrentBranch', 'ignore'])
def revert(self, work_copy, user, password, log=None):
    if not os.path.exists(work_copy):
        raise Exception(work_copy + " does not exist")
    return process.run("svn revert " + StrUtil.quote(work_copy) +
                       self._get_svn_para(user, password),
                       log=log)
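# Hedged usage sketch for the SVN wrapper methods above (get/update/revert),
# assuming `svn` is an instance of the enclosing class; the URL, path, and
# credentials are placeholders.
def _example_svn_workflow(svn):
    svn.get('http://svn.example.com/repo/trunk', '/tmp/wc', 'user', 'secret')
    svn.update('/tmp/wc', 'user', 'secret', ver='1234')
    svn.revert('/tmp/wc', 'user', 'secret')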
img = {
    'style': img_load(style, img_size, device),
    'content': img_load(content, img_size, device)
}

# Create the input image
img['input'] = img['content'].clone()

# Print information
print('Running the algorithm...')

# Add our loss and normalization modules to the model
style_model, losses = add_modules(model, norm_mean, norm_std, img, layers,
                                  device, replace_max_to_avg)

# Run the algorithm
output, style_scores, content_scores = run(style_model, img, num_steps,
                                           weights, losses, scheduler)

# Print information
print('\nThe algorithm was executed successfully!')

###############
# Exportation #
###############

# Get the full output path
full_output_path = '{}{}-{}-{}-{}-{}-{}'.format(
    output_path,
    os.path.splitext(os.path.basename(style))[0],
    os.path.splitext(os.path.basename(content))[0],
    model_name,
    'pretrained' if model_pretrained else 'notpretrained',
def searchfile():
    total = 0
    total_err = 0
    path = os.path.expanduser(config.CONFIG['path'])
    host = config.CONFIG['host']
    port = config.CONFIG['port']
    user = config.CONFIG['id']
    password = config.CONFIG['password']
    dbname = config.CONFIG['dbname']
    start = sys.argv[1]
    end = sys.argv[2]
    is_all = False
    if start is None:
        is_all = True
    now = datetime.datetime.now()
    progress_file = config.CONFIG['file_list_path'] + "/progress_" + \
        now.strftime('%Y-%m-%d_%H:%M:%S') + ".txt"
    progressed_log_file = open(progress_file, 'w')
    progressed_log_file.write('Processing from ' + start + ' to ' + end + '\n')
    progressed_log_file.close()
    db = database(host, port, user, password, dbname)
    # If you want to measure the actual running time of this program, use this.
    start_time = time.time()
    is_process = 0
    r = run()
    for out_dir in sorted(os.listdir(path)):
        if is_all:
            out_dir_path = path + "/" + out_dir
            file_path = out_dir_path + "/log_tcp_complete"
            r.fileread(file_path, out_dir, db, progress_file)
        else:
            if out_dir.startswith(start):
                is_process = 1
            elif out_dir.startswith(end):
                is_process = 2
            if is_process == 1 or is_process == 2:
                if is_process == 2 and not out_dir.startswith(end):
                    break
                out_dir_path = path + "/" + out_dir
                file_path = out_dir_path + "/log_tcp_complete"
                r.fileread(file_path, out_dir, db, progress_file)
    total = r.total
    total_err = r.total_err
    analyze(total, total_err)
    end_time = time.time()
    running_time = int(end_time - start_time)
    print('Total Running time : {:02d}:{:02d}:{:02d}'.format(
        running_time // 3600, (running_time % 3600 // 60), running_time % 60))
def main():
    process.run()
    return sourceArray, editedSourceArray


def saveOutputs(outDir, diff=None):
    if diff is not None:
        plt.imsave(os.path.join(syntheticsDirPath, 'diff.png'), diff)
    return 0


"""Main function

This is the main entry point.
"""
if __name__ == '__main__':
    logging.info('Start main')
    source, editedSource = getInputs()
    difference = process.run(params, source, editedSource)
    utils.saveOutput(syntheticsDirPath, difference, 'difference')
    logging.info('End main')

## Heart of the C code:
# void Pima::run(const char * outputName)
# {
#     unsigned int IPS(0), JPS(0), IS(0), JS(0), cpt(0);
#     double Cprec(0.0);
#     float cpt2(0.9);
#     CImg<unsigned char> SRC_prec(srcToFillRGB);
#     SRC_prec.fill(0);
#     initialize_Pp();
#     my_display(cpt);
#     while (nb_points != 0)
#     {
def run_module(
    module,
    preserve_tmp_clones=False,
    capture_output=True,
    shell=False,
    ports=None,
):
    module = Path(module).absolute().resolve()
    runs_path = get_runs_clone(module)
    if capture_output:
        runner_logs_dir = runs_path / LOGS_DIR / RUNNER_LOGS_DIR / now_str
        runner_logs_dir.mkdir(parents=True)
        stdout_path = runner_logs_dir / RUNNER_STDOUT_BASENAME
        stderr_path = runner_logs_dir / RUNNER_STDERR_BASENAME
        print('Redirecting stdout/stderr to %s, %s' % (stdout_path, stderr_path))
        original_stdout = sys.stdout
        original_stderr = sys.stderr
        sys.stdout = stdout_path.open('w')
        sys.stderr = stderr_path.open('w')
    print('%s: module %s starting' % (now_str, module))
    try:
        dir = TemporaryDirectory(prefix='gismo_')
        if preserve_tmp_clones:
            from contextlib import nullcontext
            ctx = nullcontext()
        else:
            ctx = dir
        with ctx:
            dir = Path(dir.name)
            with cd(module):
                config = load_config()
                name = get_name(config)
                dockerfile_src, cmd = make_cmd(config, dir, shell=shell, ports=ports)
            run(['git', 'clone', module, dir])
            dockerfile = dir / DOCKERFILE_PATH
            print('Installing Dockerfile %s in temporary clone: %s' % (dockerfile_src, dockerfile))
            dockerfile_src.rename(dockerfile)
            git.set_user_configs(name)
            with cd(dir):
                run(['docker', 'build', '-t', name, '-f', dockerfile, '.'])
                remote = git.remote()
                # if not upstream_branch:
                #     upstream_branch = DEFAULT_UPSTREAM_BRANCH
                [_remote, upstream_branch] = line([
                    'git', 'rev-parse', '--abbrev-ref',
                    '--symbolic-full-name', '@{u}'
                ]).split('/')
                assert _remote == remote
                upstream_remote_branch = '%s/%s' % (remote, upstream_branch)
                original_upstream_sha = git.sha(upstream_remote_branch)
                print('Working from upstream branch %s (%s)' %
                      (upstream_remote_branch, original_upstream_sha))
                base_sha = git.sha()
                if original_upstream_sha != base_sha:
                    print('Overriding cloned HEAD %s to start from upstream %s (%s)' %
                          (base_sha, upstream_remote_branch, original_upstream_sha))
                    git.checkout(upstream_remote_branch)
                    base_sha = original_upstream_sha
                run(cmd)
                if shell:
                    return
                run_sha, msg = make_run_commit(config)
                merge_results(
                    module,
                    runs_path=runs_path,
                    config=config,
                    base_sha=base_sha,
                    run_sha=run_sha,
                    msg=msg,
                    original_upstream_sha=original_upstream_sha,
                    remote=remote,
                    upstream_branch=upstream_branch,
                    now_str=now_str,
                )
    finally:
        if capture_output:
            print('Restoring stdout, stderr')
            sys.stdout.close()
            sys.stderr.close()
            sys.stdout = original_stdout
            sys.stderr = original_stderr
#!/usr/bin/env python
import process

process.run()