def run(self):
    self.testReady()
    # submits the input file to Gaussian
    process = Popen([self.executablePath, self.inputFilePath, self.outputFilePath])
    process.communicate()  # necessary to wait for executable termination!
    return self.verifyOutputFile()
def send(self, msg):
    args = [self.pdsend, str(DEFAULT_PORT)]
    print(args, msg)
    msg = "; " + msg + ";"
    sendProc = Popen(args, stdin=PIPE,
                     close_fds=(sys.platform != "win32"),
                     universal_newlines=True)
    out, err = sendProc.communicate(input=msg)
def submit_job(command, params, test_run=False):
    from subprocess import Popen, PIPE

    clear_directories('inputs', 'stdout', 'stderr')

    with open(params_path, 'w') as file:
        json.dump(params, file)

    qsub_command = (
        'qsub', '-cwd',
        '-S', '/bin/sh',
        '-o', 'stdout',
        '-e', 'stderr',
        '-l', 'h_rt=6:00:00' if not test_run else 'h_rt=0:30:00',
        '-l', 'mem_free=1G',
        '-l', 'arch=linux-x64',
        '-l', 'netapp=1G',
        '-t', '1-{0}'.format(len(params)),
        '-N', command,
    )

    process = Popen(qsub_command, stdin=PIPE)
    process.stdin.write('module load imp-fast;')
    process.stdin.write('PYTHONPATH=.:$PYTHONPATH;')
    process.stdin.write('/netapp/home/kale/.local/bin/python2.7 ' + command)
    process.stdin.close()
    process.wait()
def testLoadWithUNC(self):
    # Build a UNC path from the regular path.
    # Something like
    #   \\%COMPUTERNAME%\c$\python27\python.exe
    fullname = os.path.abspath(sys.executable)
    if fullname[1] != ':':
        self.skipTest('unusable path: %r' % fullname)
    unc_name = r'\\%s\%s$\%s' % (os.environ['COMPUTERNAME'],
                                 fullname[0],
                                 fullname[3:])

    with test_support.EnvironmentVarGuard() as env:
        env.unset("TCL_LIBRARY")
        cmd = '%s -c "import Tkinter; print Tkinter"' % (unc_name,)

        try:
            p = Popen(cmd, stdout=PIPE, stderr=PIPE)
        except WindowsError as e:
            if e.winerror == 5:
                self.skipTest('Not permitted to start the child process')
            else:
                raise
        out_data, err_data = p.communicate()
        msg = '\n\n'.join(['"Tkinter.py" not in output',
                           'Command:', cmd,
                           'stdout:', out_data,
                           'stderr:', err_data])
        self.assertIn('Tkinter.py', out_data, msg)
        self.assertEqual(p.wait(), 0, 'Non-zero exit code')
def open(cls):
    if os.stat('Package.swift').st_size == 0:
        name = os.path.basename(os.getcwd())
        return cls(name=name)

    with open('Package.swift') as fp:
        package_source = fp.read().replace('import PackageDescription', '')

    file_dirname = os.path.dirname(os.path.abspath(__file__))
    description = os.path.join(file_dirname, 'PackageDescription.swift')

    with open(description) as fp:
        contents = fp.read().replace('// package', package_source)

    with tempfile.NamedTemporaryFile() as fp:
        fp.write(contents)
        fp.flush()
        process = Popen(['swift', fp.name], stdout=PIPE, stderr=PIPE)
        output, err = process.communicate()

    if process.returncode != 0:
        raise Exception('Problem Building Package: {}'.format(err))

    package = json.loads(output)
    dependencies = [Dependency.fromjson(x) for x in package['dependencies']]
    test_dependencies = [Dependency.fromjson(x) for x in package['test_dependencies']]
    return cls(name=package['name'], dependencies=dependencies,
               test_dependencies=test_dependencies)
def cat_counter_references(counter_references=None, target_dir=curdir,
                           path_to_bowtie2='bowtie2',
                           logger=None, **kwargs):
    if counter_references is None:
        return
    try:
        makedirs(target_dir, mode=0755)
    except OSError:
        pass
    debug('Validating counter-references and building counter-reference index')
    valid_references = validate_references(references=counter_references,
                                           target_dir=target_dir,
                                           path_to_bowtie2=path_to_bowtie2,
                                           logger=logger,
                                           environ_key='SOT_DEFAULT_COUNTER_REFERENCES')
    crefs_fa = open(join(target_dir, 'counter_references.fa'), 'w')
    for ref in valid_references:
        Popen([path_to_bowtie2 + '-inspect', ref], stdout=crefs_fa).wait()
    crefs_index = join(target_dir, counter_references)
    args = [path_to_bowtie2 + '-build', crefs_fa, crefs_index]
    P = Popen(args, stderr=PIPE)
    stderr = P.communicate()[1]
    if stderr.startswith('Error'):
        critical(stderr)
        critical('No counter-references will be used.')
    return crefs_index
def index():
    url_for('static', filename='logo.ico')
    if request.method == 'POST':
        # Check files that start with 'oide*'
        files = glob.glob("oide*")
        print(files)
        # Check if C was compiled
        if len(files) < 1:
            print("Compiling O...")
            compileO()
        # Run code
        code = request.form['code']
        input = request.form['input'].replace('\r\n', '\n')
        if input is None:
            input = ""
        print('Got code:', code, 'input:', input)
        print('Running O code...')
        p = Popen(['./oide', '-e', code], stdout=PIPE, stderr=PIPE, stdin=PIPE,
                  universal_newlines=True)
        output, error = p.communicate(input)
        # Output to IDE
        if p.returncode:
            print('Output:', output, 'error:', error)
            return render_template('error.html', code=code, input=input,
                                   error=error)
        else:
            print('Output:', output, 'stack:', error)
            return render_template('code.html', code=code, input=input,
                                   output=output, stack=error or '[]')
    else:
        return render_template('primary.html')
def check_status(jobs_to_monitor):
    """Check the status of the passed list of jobs

    Parameters
    ----------
    jobs_to_monitor: Iterable
        The jobs id

    Returns
    -------
    list
        A subset of jobs_to_monitor containing those jobs that are
        still running
    """
    # Get all the commands running of the current user
    user = environ['USER']
    qstat_cmd = "qstat | grep %s" % user
    proc = Popen(qstat_cmd, stdout=PIPE, stderr=PIPE, shell=True)
    (stdout, stderr) = proc.communicate()
    # Parse the qstat output
    lines = stdout.splitlines()
    running_jobs = []
    for l in lines:
        job_id, job_name, user, time, status, queue = l.split()
        job_id = job_id.split('.')[0]
        # Check if this job is one of the jobs that we have to
        # monitor and check if it is running or queued
        if job_id in jobs_to_monitor and status in ['R', 'Q']:
            running_jobs.append(job_id)
    # Return the list with the running jobs that we're still waiting for
    return running_jobs
def find_bowtie2_index(r, path_to_bowtie2='bowtie2'):
    """check for bowtie2 index as given.
    return the index basename if found, else return None
    """
    args = [path_to_bowtie2 + '-inspect', '-v', '-s', r]
    debug(' '.join(args))
    P = Popen(args, stdout=open(devnull, 'w'), stderr=PIPE, cwd=mkdtemp())
    stderr = P.communicate()[1].splitlines()
    if not stderr[0].startswith('Could not locate'):
        for line in stderr:
            if line.startswith('Opening'):
                index_bt2 = line[(1 + line.find('"')):line.rfind('"')]
                index_basename = index_bt2[0:index_bt2.find('.1.bt2')]
                return index_basename
    for d in [getcwd(), os.path.split(path_to_bowtie2)[0],
              join(os.path.split(path_to_bowtie2)[0], 'indexes')]:
        rprime = join(d, r)
        args = [path_to_bowtie2 + '-inspect', '-v', '-s', rprime]
        debug(' '.join(args))
        P = Popen(args, stdout=open(devnull, 'w'), stderr=PIPE, cwd=mkdtemp())
        stderr = P.communicate()[1].splitlines()
        if not stderr[0].startswith('Could not locate'):
            for line in stderr:
                if line.startswith('Opening'):
                    index_bt2 = line[(1 + line.find('"')):line.rfind('"')]
                    index_basename = index_bt2[0:index_bt2.find('.1.bt2')]
                    return index_basename
    return None
def _orientation(self, image):
    if settings.THUMBNAIL_CONVERT.endswith('gm convert'):
        args = settings.THUMBNAIL_IDENTIFY.split()
        args.extend(['-format', '%[exif:orientation]', image['source']])
        p = Popen(args, stdout=PIPE)
        p.wait()
        result = p.stdout.read().strip()
        if result:
            result = int(result)
            options = image['options']
            if result == 2:
                options['flop'] = None
            elif result == 3:
                options['rotate'] = '180'
            elif result == 4:
                options['flip'] = None
            elif result == 5:
                options['rotate'] = '90'
                options['flop'] = None
            elif result == 6:
                options['rotate'] = '90'
            elif result == 7:
                options['rotate'] = '-90'
                options['flop'] = None
            elif result == 8:
                options['rotate'] = '-90'
    else:
        # ImageMagick also corrects the orientation exif data for
        # destination
        image['options']['auto-orient'] = None
    return image
def present_string_diff(a, di, path):
    "Pretty-print a nbdime diff."
    header = ["patch {}:".format(path)]

    if _base64.match(a):
        return header + ['<base64 data changed>']

    b = patch(a, di)
    td = tempfile.mkdtemp()
    cmd = None
    try:
        with open(os.path.join(td, 'before'), 'w') as f:
            f.write(a)
        with open(os.path.join(td, 'after'), 'w') as f:
            f.write(b)
        if which('git'):
            cmd = _git_diff_print_cmd.split()
            heading_lines = 4
        elif which('diff'):
            cmd = ['diff']
            heading_lines = 0
        else:
            dif = ''.join(unified_diff(a.split("\n"), b.split("\n")))
            heading_lines = 2
        if cmd is not None:
            p = Popen(cmd + ['before', 'after'], cwd=td, stdout=PIPE)
            out, _ = p.communicate()
            dif = out.decode('utf8')
    finally:
        shutil.rmtree(td)
    return header + dif.splitlines()[heading_lines:]
def run(self):
    try:
        p = Popen('git log -1'.split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    except IOError:
        print("No git found, skipping git revision")
        return

    if p.wait():
        print("checking git branch failed")
        print(p.stderr.read())
        return

    line = p.stdout.readline().decode().strip()
    if not line.startswith('commit'):
        print("bad commit line: %r" % line)
        return

    rev = line.split()[-1]

    # now that we have the git revision, we can apply it to version.pyx
    with open(self.version_pyx) as f:
        lines = f.readlines()

    for i, line in enumerate(lines):
        if line.startswith('__revision__'):
            lines[i] = "__revision__ = '%s'\n" % rev
            break
    with open(self.version_pyx, 'w') as f:
        f.writelines(lines)
def inner(*args, **kwargs):
    cmd = "which {}".format(progname)
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT)
    out, _ = proc.communicate()
    if proc.returncode not in valid:
        raise Exception("{} is not on this machine".format(progname))
    return fn(*args, **kwargs)
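# The `inner` wrapper above is the closure returned by a decorator factory;
# the enclosing function that binds `progname`, `valid`, and `fn` is not part
# of the fragment. A minimal sketch of what that factory could look like,
# assuming the name `requires_program` and a `valid=(0,)` default (both
# hypothetical, not taken from the original):
from functools import wraps
from subprocess import Popen, PIPE, STDOUT


def requires_program(progname, valid=(0,)):
    """Hypothetical factory that would produce the `inner` wrapper above."""
    def decorator(fn):
        @wraps(fn)
        def inner(*args, **kwargs):
            # Probe for the program with `which`; refuse to call fn if absent.
            proc = Popen("which {}".format(progname), shell=True,
                         stdout=PIPE, stderr=STDOUT)
            proc.communicate()
            if proc.returncode not in valid:
                raise Exception("{} is not on this machine".format(progname))
            return fn(*args, **kwargs)
        return inner
    return decorator


# Example use of the hypothetical factory:
@requires_program('ffmpeg')
def transcode(path):
    pass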
def get_git_branch():
    p = Popen("git branch", stdout=PIPE, stderr=STDOUT, env=os.environ,
              shell=True)
    brlist = [b.strip() for b in p.communicate()[0].split("\n")]
    for b in brlist:
        if b.startswith("*"):
            return b[2:]
    return ""
def check_output(*popenargs, **kwargs):
    r"""Run command with arguments and return its output as a byte string.

    If the exit code was non-zero it raises a CalledProcessError.  The
    CalledProcessError object will have the return code in the returncode
    attribute and output in the output attribute.

    The arguments are the same as for the Popen constructor.  Example:

    >>> check_output(["ls", "-l", "/dev/null"])
    'crw-rw-rw- 1 root root 1, 3 Oct 18  2007 /dev/null\n'

    The stdout argument is not allowed as it is used internally.
    To capture standard error in the result, use stderr=STDOUT.

    >>> check_output(["/bin/sh", "-c",
    ...               "ls -l non_existent_file ; exit 0"],
    ...              stderr=STDOUT)
    'ls: non_existent_file: No such file or directory\n'

    NOTE: copied from 2.7 standard library so that we maintain our
    compatibility with 2.5
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = Popen(stdout=PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise CalledProcessError(retcode, cmd)
    return output
def sshCmd(cmd):
    sshcmd = ['ssh', '-i', sshKey, sshUserHost]   # set up ssh command
    sshcmd.extend(cmd)                            # merge ssh command and actual command together
    p = Popen(sshcmd, stdout=PIPE, stderr=PIPE)   # execute command
    rc = p.wait()                                 # stores return code
    output = [p.stdout.read(), p.stderr.read()]   # stores program output
    return rc, output
def getVersion(init_file):
    try:
        return os.environ['BUILDBOT_VERSION']
    except KeyError:
        pass

    try:
        cwd = os.path.dirname(os.path.abspath(init_file))
        fn = os.path.join(cwd, 'VERSION')
        version = open(fn).read().strip()
        return version
    except IOError:
        pass

    from subprocess import Popen, PIPE, STDOUT
    import re

    # accept version to be coded with 2 or 3 parts (X.Y or X.Y.Z),
    # no matter the number of digits for X, Y and Z
    VERSION_MATCH = re.compile(r'(\d+\.\d+(\.\d+)?(\w|-)*)')

    try:
        p = Popen(['git', 'describe', '--tags', '--always'],
                  stdout=PIPE, stderr=STDOUT, cwd=cwd)
        out = p.communicate()[0]

        if (not p.returncode) and out:
            v = VERSION_MATCH.search(out)
            if v is not None:
                return v.group(1)
    except OSError:
        pass

    return "999.0-version-not-found"
def compress_file(file_path, compress_path):
    seven_zip = get_7zip_path()
    if seven_zip:
        process = Popen([seven_zip, 'a', '-tgzip',
                         #'-mx=9', '-mfb=257', '-mpass=15',
                         compress_path, file_path],
                        stdout=PIPE, stderr=PIPE)
        output, _ = process.communicate()
        retcode = process.poll()
        if retcode:
            LOG.error('Failed to compress file "%s" as "%s": %s',
                      file_path, compress_path, str(output))
            return False
        else:
            return True
    else:
        cache_dir = dirname(compress_path)
        if not isdir(cache_dir):
            os.makedirs(cache_dir)
        try:
            with GzipFile(compress_path, mode='wb', compresslevel=9) as gzipfile:
                with open(file_path, 'rb') as f:
                    gzipfile.write(f.read())
        except IOError as e:
            LOG.error(str(e))
            return False
        LOG.warning('Using Python for GZip compression, install 7zip for optimal performance')
        return True
def solveIt(inputData):
    # Writes the inputData to a temporary file
    tmpFileName = 'tmp.data'
    tmpFile = open(tmpFileName, 'w')
    tmpFile.write(inputData)
    tmpFile.close()

    # Runs the command: java Solver -file=tmp.data
    process = Popen(['java', '-Xmx5g',
                     '-cp', 'idea/out/production/Coloring/',
                     'optimization.coloring.ColoringSolver', tmpFileName],
                    stdout=PIPE)
    (stdout, stderr) = process.communicate()

    # removes the temporary file
    os.remove(tmpFileName)

    return stdout.strip()
def run_program(rcmd):
    """
    Runs a program with its parameters (e.g. rcmd="ls -lh /var/www").
    Returns output if successful, or None and logs error if not.
    """
    cmd = shlex.split(rcmd)
    executable = cmd[0]
    executable_options = cmd[1:]

    try:
        proc = Popen(([executable] + executable_options),
                     stdout=PIPE, stderr=PIPE)
        response = proc.communicate()
        response_stdout, response_stderr = (response[0].decode('UTF-8'),
                                            response[1].decode('UTF-8'))
    except OSError as e:
        if e.errno == errno.ENOENT:
            print("Unable to locate '%s' program. Is it in your path?" % executable)
        else:
            print("O/S error occurred when trying to run '%s': \"%s\"" % (executable, str(e)))
    except ValueError as e:
        print("Value error occurred. Check your parameters.")
    else:
        if proc.wait() != 0:
            print("Executable '%s' returned with the error: \"%s\"" % (executable, response_stderr))
            return response
        else:
            #print("Executable '%s' returned successfully." % (executable))
            #print("  First line of response was \"%s\"" % (response_stdout.split('\n')[0]))
            return response_stdout
def _compress(self):
    methods = ['xz', 'bzip2', 'gzip']
    if self.method in methods:
        methods = [self.method]

    last_error = Exception("compression failed for an unknown reason")

    for cmd in methods:
        suffix = "." + cmd.replace('ip', '')
        # use fast compression if using xz or bz2
        if cmd != "gzip":
            cmd = "%s -1" % cmd
        try:
            command = shlex.split("%s %s" % (cmd, self.name()))
            p = Popen(command, stdout=PIPE, stderr=PIPE,
                      bufsize=-1, close_fds=True)
            stdout, stderr = p.communicate()
            if stdout:
                self.log_info(stdout.decode('utf-8', 'ignore'))
            if stderr:
                self.log_error(stderr.decode('utf-8', 'ignore'))
            self._suffix += suffix
            return self.name()
        except Exception as e:
            last_error = e
    raise last_error
def checkpkg(self, pkglist):
    '''
    USAGE:
    * pkglist is the list of packages you want to check
    * use lists for one or more packages
    * use a string if it is only one package
    * lists will work well in both cases

    Checks whether all the packages in the list are installed and
    returns a list of the packages which are not installed
    '''
    lines = []
    notinstalled = []
    p1 = Popen(['rpm', '-qa', '--queryformat="%{NAME} install\n"'],
               stdout=PIPE, universal_newlines=True)
    p = p1.communicate()[0]
    c = p.split('\n')
    for line in c:
        if line.find('\tinstall') != -1:  # the relevant lines
            lines.append(line.split('\t')[0])
    if self.isstr(pkglist) == True:  # if it is a string
        try:
            if lines.index(pkglist):
                pass
        except ValueError:
            notinstalled.append(pkglist)
    else:  # if it is a list
        for pkg in pkglist:
            try:
                if lines.index(pkg):
                    pass
            except ValueError:
                notinstalled.append(pkg)
    return notinstalled
def status(request, plugin_id):
    """
    Returns a dict containing the current status of the services

    status can be one of:
        - STARTING
        - RUNNING
        - STOPPING
        - STOPPED
    """
    pid = None

    proc = Popen([utils.nzbhydra_control, "onestatus"], stdout=PIPE, stderr=PIPE)
    stdout = proc.communicate()[0]

    if proc.returncode == 0:
        status = 'RUNNING'
        pid = stdout.split('\n')[0]
    else:
        status = 'STOPPED'

    return HttpResponse(json.dumps({
        'status': status,
        'pid': pid,
    }), content_type='application/json')
def run(self):
    msg.progress(_("running: %s") % ' '.join(self.command))
    process = Popen(self.command, stdin=devnull(), stdout=self.stdout)
    if process.wait() != 0:
        msg.error(_("execution of %s failed") % self.command[0])
        return False
    return True
class PopenWrapperClass(object):
    """ context wrapper around subprocess.Popen """
    def __init__(self, command):
        """ init fn """
        self.command = command
        self.pop_ = Popen(self.command, shell=True, stdout=PIPE)

    def __iter__(self):
        return self.pop_.stdout

    def __enter__(self):
        """ enter fn """
        return self.pop_.stdout

    def __exit__(self, exc_type, exc_value, traceback):
        """ exit fn """
        if hasattr(self.pop_, '__exit__'):
            efunc = getattr(self.pop_, '__exit__')
            return efunc(exc_type, exc_value, traceback)
        else:
            self.pop_.wait()
            if exc_type or exc_value or traceback:
                return False
            else:
                return True
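# Usage sketch for PopenWrapperClass above (the `ls -l` command line is just
# an illustrative placeholder): the context manager yields the process's
# stdout stream, which can be iterated line by line.
with PopenWrapperClass('ls -l') as stdout:
    for raw_line in stdout:
        print(raw_line.rstrip())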
def get_cmdline(self, proc):
    if mozinfo.os == "win":
        # The psutil.cmdline() implementation on Windows is pretty busted,
        # in particular it doesn't handle getting the command line of a
        # 64-bit process from a 32-bit python process very well.
        #
        # Instead we just shell out the WMIC command which works rather
        # well.
        cmd = "WMIC path win32_process where handle='%d' get Commandline" % (proc.pid)
        process = Popen(cmd.split(), stdout=PIPE)
        (output, err) = process.communicate()
        process.wait()

        # The output of WMIC is something like:
        #   Commandline
        #
        #
        #   path/to/exe --args etc

        buf = StringIO.StringIO(output)
        buf.readline()  # header
        for line in buf:
            if line.strip():
                return line.strip()

        # If all else fails, just return the executable path.
        return proc.exe()
    else:
        return " ".join(proc.cmdline())
class GraphNode:
    def __init__(self, name, command):
        self.name = name
        self.command = command
        self.process = None
        self.stdin = None
        self.stdout = None
        self.outputs = []

    def execute(self):
        if self.command is not None:
            self.process = Popen(self.command, shell=True, stdin=PIPE,
                                 stdout=PIPE, stderr=None)
            make_nonblocking(self.process.stdout.fileno())
            make_nonblocking(self.process.stdin.fileno())
            self.stdin = GraphNodeStream(self, self.process.stdin, 'stdin')
            self.stdout = GraphNodeStream(self, self.process.stdout, 'stdout')

    # Returns False iff this node will never produce more data
    def is_live(self):
        if self.process is not None:
            self.process.poll()
        return self.command is None or (self.process is not None and
                                        self.process.returncode is None)

    def is_readable(self):
        return self.stdout.is_live()

    def is_writable(self):
        return self.stdin.is_live()

    def __repr__(self):
        if self.command is None:
            return "(" + self.name + ")"
        return "(" + self.name + ": " + self.command + ")"
def run(self):
    test = 'FREPPLE_TEST' in os.environ

    # Start a PSQL process
    my_env = os.environ
    if settings.DATABASES[self.owner.database]['PASSWORD']:
        my_env['PGPASSWORD'] = settings.DATABASES[self.owner.database]['PASSWORD']
    process = Popen("psql -q -w %s%s%s%s" % (
        settings.DATABASES[self.owner.database]['USER'] and ("-U %s " % settings.DATABASES[self.owner.database]['USER']) or '',
        settings.DATABASES[self.owner.database]['HOST'] and ("-h %s " % settings.DATABASES[self.owner.database]['HOST']) or '',
        settings.DATABASES[self.owner.database]['PORT'] and ("-p %s " % settings.DATABASES[self.owner.database]['PORT']) or '',
        settings.DATABASES[self.owner.database]['TEST']['NAME'] if test
        else settings.DATABASES[self.owner.database]['NAME'],
    ), stdin=PIPE, stderr=PIPE, bufsize=0, shell=True, env=my_env)
    if process.returncode is None:
        # PSQL session is still running
        process.stdin.write("SET statement_timeout = 0;\n".encode(self.owner.encoding))
        process.stdin.write("SET client_encoding = 'UTF8';\n".encode(self.owner.encoding))

    # Run the functions sequentially
    try:
        for f in self.functions:
            f(self.owner, process)
    finally:
        msg = process.communicate()[1]
        if msg:
            print(msg)

    # Close the pipe and PSQL process
    if process.returncode is None:
        # PSQL session is still running.
        process.stdin.write('\\q\n'.encode(self.owner.encoding))
        process.stdin.close()
def detection(self):
    '''
    Detect the models of the graphics cards and store them in self.cards
    '''
    self.cards = []
    p1 = Popen(['lspci', '-n'], stdout=PIPE, universal_newlines=True)
    p = p1.communicate()[0].split('\n')
    # if you don't have an nvidia card, fake one for debugging
    #p = ['00:02.0 0300: 10DE:03DE (rev 02)']
    indentifier1 = re.compile('.*0300: *(.+):(.+) \(.+\)')
    indentifier2 = re.compile('.*0300: *(.+):(.+)')
    for line in p:
        m1 = indentifier1.match(line)
        m2 = indentifier2.match(line)
        if m1:
            id1 = m1.group(1).strip().lower()
            id2 = m1.group(2).strip().lower()
            id = id1 + ':' + id2
            self.cards.append(id)
        elif m2:
            id1 = m2.group(1).strip().lower()
            id2 = m2.group(2).strip().lower()
            id = id1 + ':' + id2
            self.cards.append(id)
def test_image_video(format, filename, meta_test):
    print
    print '---------------------------------------'
    print '%s - %s' % (format, filename)
    print '---------------------------------------'

    out_name = 'tests/_test_metadata_%s.ome.tif' % (filename)
    out_fmt = 'ome-tiff'
    filename = 'images/%s' % (filename)

    # test if file can be read
    command = [IMGCNV, '-i', filename, '-meta-parsed']
    r = Popen(command, stdout=PIPE).communicate()[0]
    meta_org = parse_imgcnv_info(r)
    if r is None or r.startswith('Input format is not supported') or len(meta_org) <= 0:
        print_failed('reading video', format)
        return
    #print str(meta_org)

    # test if converted file has same info
    if compare_info(meta_org, meta_test) == True:
        print_passed('reading video info')

    print
from flask import Flask, render_template, Response, jsonify
from subprocess import Popen, PIPE

app = Flask(__name__)
p = Popen(['./raspi_microscope'], shell=True, stdout=PIPE, stdin=PIPE)


@app.route('/')
def index():
    return render_template('index.html')


def gen(pro=Popen([''], shell=True, stdout=PIPE, stdin=PIPE)):
    while True:
        frame = pro.stdout.readline().strip()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')


@app.route('/video_feed')
def video_feed():
    return Response(gen(p),
                    mimetype='multipart/x-mixed-replace; boundary=frame')


if __name__ == '__main__':
    app.run(host='127.0.0.1', debug=True, port="5000")
def hp_health_values(self):
    #print "VALUES hp_health"
    #temp_ambient = Popen("hplog -t | grep Ambient | awk '{print $7}'", stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')[:-1]
    temp_total = Popen("hplog -t | tail -n +2 | head -n -1 | wc -l",
                       stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    temp_normal = Popen("hplog -t | tail -n +2 | head -n -1 | grep -c Normal",
                        stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(temp_total) > int(temp_normal):
        temp_stat = "WRN"
    else:
        temp_stat = "OK"

    fans_total = Popen("hplog -f | tail -n +2 | head -n -1 | wc -l",
                       stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    fans_normal = Popen("hplog -f | tail -n +2 | head -n -1 | grep -c Normal",
                        stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(fans_total) > int(fans_normal):
        fans_stat = "F"
    else:
        fans_stat = "OK"

    pwr_total = Popen("hplog -p | tail -n +2 | head -n -1 | wc -l",
                      stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    pwr_normal = Popen("hplog -p | tail -n +2 | head -n -1 | grep -c Normal",
                       stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(pwr_total) > int(pwr_normal):
        pwr_stat = "F"
    else:
        pwr_stat = "OK"

    dimm_total = Popen("hpasmcli -s 'show dimm' | grep -c Status",
                       stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    dimm_normal = Popen("hpasmcli -s 'show dimm' | grep Status | grep -c Ok",
                        stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(dimm_total) > int(dimm_normal):
        dimm_stat = "F"
    else:
        dimm_stat = "OK"

    proc_total = Popen("hpasmcli -s 'show server' | grep -c Status",
                       stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    proc_normal = Popen("hpasmcli -s 'show server' | grep Status | grep -c Ok",
                        stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(proc_total) > int(proc_normal):
        proc_stat = "F"
    else:
        proc_stat = "OK"

    ld_total = Popen("hpacucli ctrl slot=0 ld all show | grep -c logical",
                     stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    ld_normal = Popen("hpacucli ctrl slot=0 ld all show | grep -c OK",
                      stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(ld_total) > int(ld_normal):
        ld_stat = "F"
    else:
        ld_stat = "OK"

    pd_total = Popen("hpacucli ctrl slot=0 pd all show | grep -c physical",
                     stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    pd_normal = Popen("hpacucli ctrl slot=0 pd all show | grep -c OK",
                      stdin=PIPE, shell=True, stdout=PIPE).communicate()[0].rstrip('\n').rstrip('\r')
    if int(pd_total) > int(pd_normal):
        pd_stat = "F"
    else:
        pd_stat = "OK"

    return temp_stat, fans_stat, pwr_stat, dimm_stat, proc_stat, ld_stat, pd_stat
"""Set up Qiime 2 on Google colab. Do not use this on o local machine, especially not as an admin! """ import os import sys from subprocess import Popen, PIPE r = Popen(["pip", "install", "rich"]) r.wait() from rich.console import Console # noqa con = Console() has_conda = "conda version" in os.popen("conda info").read() has_qiime = "QIIME 2 release:" in os.popen("qiime info").read() MINICONDA_PATH = ( "https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh") QIIME_YAML_URL = ( "https://data.qiime2.org/distro/core/qiime2-2021.2-py36-linux-conda.yml") QIIME_YAML = os.path.basename(QIIME_YAML_URL) def cleanup(): """Remove downloaded files.""" if os.path.exists("Miniconda3-latest-Linux-x86_64.sh"): os.remove("Miniconda3-latest-Linux-x86_64.sh") if os.path.exists(QIIME_YAML): os.remove(QIIME_YAML) con.log("Cleaned up unneeded files.")
def unpack_rpm(package_full_path, files, tmp_dir, destdir, exact_files=False):
    """
    Unpacks a single rpm located in tmp_dir into destdir.

    Arguments:
        package_full_path - full file system path to the rpm file
        files - files to extract from the rpm
        tmp_dir - temporary directory where the rpm file is located
        destdir - destination directory for the rpm package extraction
        exact_files - extract only specified files

    Returns:
        RETURN_FAILURE in case of a serious problem
    """
    log1("Extracting %s to %s", package_full_path, destdir)
    log2("%s", files)
    print(_("Extracting cpio from {0}").format(package_full_path))
    unpacked_cpio_path = tmp_dir + "/unpacked.cpio"
    try:
        unpacked_cpio = open(unpacked_cpio_path, 'wb')
    except IOError as ex:
        print(_("Can't write to '{0}': {1}").format(unpacked_cpio_path, ex))
        return RETURN_FAILURE

    rpm2cpio = Popen(["rpm2cpio", package_full_path],
                     stdout=unpacked_cpio, bufsize=-1)
    retcode = rpm2cpio.wait()

    if retcode == 0:
        log1("cpio written OK")
    else:
        unpacked_cpio.close()
        print(_("Can't extract package '{0}'").format(package_full_path))
        return RETURN_FAILURE

    # close the file
    unpacked_cpio.close()
    # and open it for reading
    unpacked_cpio = open(unpacked_cpio_path, 'rb')

    print(_("Caching files from {0} made from {1}").format(
        "unpacked.cpio", os.path.basename(package_full_path)))

    file_patterns = ""
    cpio_args = ["cpio", "-idu"]
    if exact_files:
        for filename in files:
            file_patterns += "." + filename + " "
        cpio_args = ["cpio", "-idu", file_patterns.strip()]

    with open("/dev/null", "w") as null:
        cpio = Popen(cpio_args, cwd=destdir, bufsize=-1,
                     stdin=unpacked_cpio, stdout=null, stderr=null)
        retcode = cpio.wait()

    if retcode == 0:
        log1("files extracted OK")
        #print _("Removing temporary cpio file")
        os.unlink(unpacked_cpio_path)
    else:
        print(_("Can't extract files from '{0}'").format(unpacked_cpio_path))
        return RETURN_FAILURE
from subprocess import Popen, CREATE_NEW_CONSOLE

p_list = []  # list of client processes

while True:
    user = input("Launch 3 clients (s) / Close clients (x) / Quit (q) ")

    if user == 'q':
        break
    elif user == 's':
        for _ in range(3):
            # The CREATE_NEW_CONSOLE flag is needed on Windows so that
            # each process starts in its own console window
            p_list.append(Popen('python client.py',
                                creationflags=CREATE_NEW_CONSOLE))
        print(' Launched 3 clients')
    elif user == 'x':
        for p in p_list:
            p.kill()
        p_list.clear()
def compile_cfitsio(self):
    p = Popen("make", shell=True, cwd=self.cfitsio_build_dir)
    p.wait()
    if p.returncode != 0:
        raise ValueError("could not compile cfitsio %s" % self.cfitsio_version)
for colheader in csv_headers:
    to_replace = '<' + colheader.strip() + '>'
    replaced_by = row_line[colheader]
    custom_msg = re.sub(to_replace, replaced_by, custom_msg)

for line in custom_msg.splitlines():
    line = line + '\n'
    outtexfh.write(line)
outtexfh.close()

time.sleep(1)

# Run tex cmd
cmd = ' '.join([latexmk, dflt_make_opts, ltxcompiler, outtex])
proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE)
stdout, stderr = proc.communicate()
rtn = proc.returncode

time.sleep(1)

# Exit if return code is not zero.
if rtn != 0:
    print('Problem in running "%s" on "%s". Aborting.' % (latexmk, outtex))
    sys.exit(1)

# Move file
src = outpdf
dest = os.path.join(pdf_dir, outpdf)
move(src, dest)
def create_visdom_connections(self):
    """If the program could not connect to Visdom server, this function will
    start a new server at port <self.port>"""
    cmd = sys.executable + ' -m visdom.server -p %d &>/dev/null &' % self.port
    print('\n\nCould not connect to Visdom server. \n Trying to start a server....')
    print('Command: %s' % cmd)
    Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
def save_extension(screen, fname):
    ppm_name = fname[:fname.find('.')] + '.ppm'
    save_ppm(screen, ppm_name)
    p = Popen(['convert', ppm_name, fname], stdin=PIPE, stdout=PIPE)
    p.communicate()
    remove(ppm_name)
def display(screen):
    ppm_name = 'pic.ppm'
    save_ppm(screen, ppm_name)
    p = Popen(['display', ppm_name], stdin=PIPE, stdout=PIPE)
    p.communicate()
    remove(ppm_name)
def run(commands, tapein, tapeout, input_filename=None, stdout=False,
        njoy_exec='njoy'):
    """Run NJOY with given commands

    Parameters
    ----------
    commands : str
        Input commands for NJOY
    tapein : dict
        Dictionary mapping tape numbers to paths for any input files
    tapeout : dict
        Dictionary mapping tape numbers to paths for any output files
    input_filename : str, optional
        File name to write out NJOY input commands
    stdout : bool, optional
        Whether to display output when running NJOY
    njoy_exec : str, optional
        Path to NJOY executable

    Raises
    ------
    subprocess.CalledProcessError
        If the NJOY process returns with a non-zero status

    """
    if input_filename is not None:
        with open(str(input_filename), 'w') as f:
            f.write(commands)

    with tempfile.TemporaryDirectory() as tmpdir:
        # Copy evaluations to appropriate 'tapes'
        for tape_num, filename in tapein.items():
            tmpfilename = os.path.join(tmpdir, 'tape{}'.format(tape_num))
            shutil.copy(str(filename), tmpfilename)

        # Start up NJOY process
        njoy = Popen([njoy_exec], cwd=tmpdir, stdin=PIPE, stdout=PIPE,
                     stderr=STDOUT, universal_newlines=True)

        njoy.stdin.write(commands)
        njoy.stdin.flush()
        lines = []
        while True:
            # If process is finished, break loop
            line = njoy.stdout.readline()
            if not line and njoy.poll() is not None:
                break

            lines.append(line)
            if stdout:
                # If user requested output, print to screen
                print(line, end='')

        # Check for error
        if njoy.returncode != 0:
            raise CalledProcessError(njoy.returncode, njoy_exec,
                                     ''.join(lines))

        # Copy output files back to original directory
        for tape_num, filename in tapeout.items():
            tmpfilename = os.path.join(tmpdir, 'tape{}'.format(tape_num))
            if os.path.isfile(tmpfilename):
                shutil.move(tmpfilename, str(filename))
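# Hedged usage sketch for run() above: the tape numbers, file names, and the
# input deck below are illustrative placeholders, not taken from the original.
njoy_commands = """
reconr
20 22
'pendf tape for U-235'/
9228 0/
0.001/
0/
stop
"""
run(njoy_commands,
    tapein={20: 'n-092_U_235.endf'},   # evaluation copied into the work dir as tape20
    tapeout={22: 'u235.pendf'},        # reconstructed PENDF copied back out of tape22
    input_filename='njoy_input.txt',
    stdout=True)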
def show_settings():
    """
    Shows settings of all of the major components.
    """
    py_str = '%d.%d.%d' % sys.version_info[:3]
    try:
        import gi
        gi.require_version('Gtk', '3.0')
        from gi.repository import Gtk
        try:
            gtkver_str = '%d.%d.%d' % (Gtk.get_major_version(),
                                       Gtk.get_minor_version(),
                                       Gtk.get_micro_version())
        except:  # any failure to 'get' the version
            gtkver_str = 'unknown version'
    except (ImportError, ValueError):
        gtkver_str = 'not found'
    # no DISPLAY is a RuntimeError in an older pygtk (e.g. 2.17 in Fedora 14)
    except RuntimeError:
        gtkver_str = 'DISPLAY not set'
    # except TypeError: To handle back formatting on version split

    try:
        from gi.repository import GObject
        try:
            pygobjectver_str = '%d.%d.%d' % GObject.pygobject_version
        except:  # any failure to 'get' the version
            pygobjectver_str = 'unknown version'
    except ImportError:
        pygobjectver_str = 'not found'

    try:
        from gi.repository import Pango
        try:
            pangover_str = Pango.version_string()
        except:  # any failure to 'get' the version
            pangover_str = 'unknown version'
    except ImportError:
        pangover_str = 'not found'

    try:
        import cairo
        try:
            pycairover_str = '%d.%d.%d' % cairo.version_info
            cairover_str = cairo.cairo_version_string()
        except:  # any failure to 'get' the version
            pycairover_str = 'unknown version'
            cairover_str = 'unknown version'
    except ImportError:
        pycairover_str = 'not found'
        cairover_str = 'not found'

    try:
        from gi import Repository
        repository = Repository.get_default()
        if repository.enumerate_versions("OsmGpsMap"):
            import gi
            gi.require_version('OsmGpsMap', '1.0')
            from gi.repository import OsmGpsMap as osmgpsmap
            try:
                osmgpsmap_str = osmgpsmap._version
            except:  # any failure to 'get' the version
                osmgpsmap_str = 'unknown version'
        else:
            osmgpsmap_str = 'not found'
    except ImportError:
        osmgpsmap_str = 'not found'

    try:
        from gi import Repository
        repository = Repository.get_default()
        if repository.enumerate_versions("GExiv2"):
            import gi
            gi.require_version('GExiv2', '0.10')
            from gi.repository import GExiv2
            try:
                gexiv2_str = GExiv2._version
            except:  # any failure to 'get' the version
                gexiv2_str = 'unknown version'
        else:
            gexiv2_str = 'not found'
    except ImportError:
        gexiv2_str = 'not found'
    except ValueError:
        gexiv2_str = 'not new enough'

    try:
        import PyICU
        try:
            pyicu_str = PyICU.VERSION
            icu_str = PyICU.ICU_VERSION
        except:  # any failure to 'get' the version
            pyicu_str = 'unknown version'
            icu_str = 'unknown version'
    except ImportError:
        pyicu_str = 'not found'
        icu_str = 'not found'

    try:
        import bsddb3 as bsddb
        bsddb_str = bsddb.__version__
        bsddb_db_str = str(bsddb.db.version()).replace(', ', '.')\
            .replace('(', '').replace(')', '')
        bsddb_location_str = bsddb.__file__
    except:
        bsddb_str = 'not found'
        bsddb_db_str = 'not found'
        bsddb_location_str = 'not found'

    try:
        import sqlite3
        sqlite3_py_version_str = sqlite3.version
        sqlite3_version_str = sqlite3.sqlite_version
        sqlite3_location_str = sqlite3.__file__
    except:
        sqlite3_version_str = 'not found'
        sqlite3_py_version_str = 'not found'
        sqlite3_location_str = 'not found'

    try:
        from .gen.const import VERSION
        gramps_str = VERSION
    except:
        gramps_str = 'not found'

    if hasattr(os, "uname"):
        kernel = os.uname()[2]
    else:
        kernel = None

    lang_str = get_env_var('LANG', 'not set')
    language_str = get_env_var('LANGUAGE', 'not set')
    grampsi18n_str = get_env_var('GRAMPSI18N', 'not set')
    grampshome_str = get_env_var('GRAMPSHOME', 'not set')
    grampsdir_str = get_env_var('GRAMPSDIR', 'not set')
    gramps_resources_str = get_env_var('GRAMPS_RESOURCES', 'not set')

    try:
        dotversion_str = Popen(['dot', '-V'],
                               stderr=PIPE).communicate(input=None)[1]
        if isinstance(dotversion_str, bytes) and sys.stdin.encoding:
            dotversion_str = dotversion_str.decode(sys.stdin.encoding)
        if dotversion_str:
            dotversion_str = dotversion_str.replace('\n', '')[23:27]
    except:
        dotversion_str = 'Graphviz not in system PATH'

    try:
        if win():
            try:
                gsversion_str = Popen(['gswin32c', '--version'],
                                      stdout=PIPE).communicate(input=None)[0]
            except:
                gsversion_str = Popen(['gswin64c', '--version'],
                                      stdout=PIPE).communicate(input=None)[0]
        else:
            gsversion_str = Popen(['gs', '--version'],
                                  stdout=PIPE).communicate(input=None)[0]
        if isinstance(gsversion_str, bytes) and sys.stdin.encoding:
            gsversion_str = gsversion_str.decode(sys.stdin.encoding)
        if gsversion_str:
            gsversion_str = gsversion_str.replace('\n', '')
    except:
        gsversion_str = 'Ghostscript not in system PATH'

    os_path = get_env_var('PATH', 'not set')
    os_path = os_path.split(os.pathsep)

    print("Gramps Settings:")
    print("----------------")
    print(' python : %s' % py_str)
    print(' gramps : %s' % gramps_str)
    print(' gtk++ : %s' % gtkver_str)
    print(' pygobject : %s' % pygobjectver_str)
    print(' pango : %s' % pangover_str)
    print(' cairo : %s' % cairover_str)
    print(' pycairo : %s' % pycairover_str)
    print(' osmgpsmap : %s' % osmgpsmap_str)
    print(' GExiv2 : %s' % gexiv2_str)
    print(' ICU : %s' % icu_str)
    print(' PyICU : %s' % pyicu_str)
    print(' o.s. : %s' % sys.platform)
    if kernel:
        print(' kernel : %s' % kernel)
    print('')
    print("Environment settings:")
    print("---------------------")
    print(' LANG : %s' % lang_str)
    print(' LANGUAGE : %s' % language_str)
    print(' GRAMPSI18N: %s' % grampsi18n_str)
    print(' GRAMPSHOME: %s' % grampshome_str)
    print(' GRAMPSDIR : %s' % grampsdir_str)
    if __debug__:
        print(' GRAMPS_RESOURCES : %s' % gramps_resources_str)
    print(' PYTHONPATH:')
    for folder in sys.path:
        print("   ", folder)
    print('')
    print("Non-python dependencies:")
    print("------------------------")
    print(' Graphviz : %s' % dotversion_str)
    print(' Ghostscr. : %s' % gsversion_str)
    print('')
    print("System PATH env variable:")
    print("-------------------------")
    for folder in os_path:
        print("   ", folder)
    print('')
    print("Databases:")
    print("-------------------------")
    print(' bsddb :')
    print('     version : %s' % bsddb_str)
    print('     db version : %s' % bsddb_db_str)
    print('     location : %s' % bsddb_location_str)
    print(' sqlite3 :')
    print('     version : %s' % sqlite3_version_str)
    print('     py version : %s' % sqlite3_py_version_str)
    print('     location : %s' % sqlite3_location_str)
    print('')
def setUp(self):
    self.app = Popen("export FLASK_APP=app.py; env/bin/python -m flask run",
                     stdin=PIPE, stdout=DEVNULL, shell=True,
                     preexec_fn=os.setsid)
    time.sleep(.5)
def verify(self, egg_path, gpg_key=constants.pub_gpg_path):
    """
    Verifies the GPG signature of the egg.  The signature is assumed to
    be in the same directory as the egg and named the same as the egg
    except with an additional ".asc" extension.

    returns (dict): {'gpg': if the egg checks out,
                     'stderr': error message if present,
                     'stdout': stdout,
                     'rc': return code}
    """
    # check if the provided files (egg and gpg) actually exist
    if egg_path and not os.path.isfile(egg_path):
        the_message = "Provided egg path %s does not exist, cannot verify." % (egg_path)
        logger.debug(the_message)
        return {'gpg': False,
                'stderr': the_message,
                'stdout': the_message,
                'rc': 1,
                'message': the_message}
    if self.config.gpg and gpg_key and not os.path.isfile(gpg_key):
        the_message = ("Running in GPG mode but cannot find "
                       "file %s to verify against." % (gpg_key))
        logger.debug(the_message)
        return {'gpg': False,
                'stderr': the_message,
                'stdout': the_message,
                'rc': 1,
                'message': the_message}

    # if we are running in no_gpg or not gpg mode then return true
    if not self.config.gpg:
        return {'gpg': True,
                'stderr': None,
                'stdout': None,
                'rc': 0}

    # if a valid egg path and gpg were received do the verification
    if egg_path and gpg_key:
        cmd_template = '/usr/bin/gpg --verify --keyring %s %s %s'
        cmd = cmd_template % (gpg_key, egg_path + '.asc', egg_path)
        logger.debug(cmd)
        process = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE)
        stdout, stderr = process.communicate()
        rc = process.returncode
        logger.debug("GPG return code: %s" % rc)
        return {'gpg': True if rc == 0 else False,
                'stderr': stderr,
                'stdout': stdout,
                'rc': rc}
    else:
        return {'gpg': False,
                'stderr': 'Must specify a valid core and gpg key.',
                'stdout': 'Must specify a valid core and gpg key.',
                'rc': 1}
# list_gadgets.py
import sys
from subprocess import Popen, PIPE

fpath = sys.argv[1]
with open(fpath, 'rb') as f:
    blob = f.read()

try:
    i = -1
    while True:
        i = blob.index('\xc3', i+1)
        for j in range(4):
            p1 = Popen(['objdump', '-M', 'intel', '-D', '-b', 'binary',
                        '-m', 'i386',
                        "--start-address=%d" % (i-j-1),
                        "--stop-address=%d" % (i+1), fpath], stdout=PIPE)
            p2 = Popen(['grep', '^ '], stdin=p1.stdout, stdout=PIPE)
            stdout, stderr = p2.communicate()
            if not stdout or '(bad)' in stdout or '<internal disassembler error>' in stdout:
                continue
            lines = stdout.splitlines()
            if lines[-1].endswith('\tret '):
                print lines[0].split('\t', 1)[0] + '\t',
                print '; \t'.join(line.split('\t')[2] for line in lines[:-1])
except ValueError:
    pass
def main():
    configure_logging()
    if not common.is_pipenv_present():
        training_log.error(
            'No virtual environment is accessible by Pipenv from this directory, unable to run mlagents-learn'
        )
        sys.exit(1)

    argv = get_argvs()
    args = parse_args(argv)
    run_id = args.run_id

    training_info = TrainingRunInfo()

    command = [
        'pipenv',
        'run',
        'mlagents-learn',
        args.trainer_config_path,
        '--run-id',
        run_id,
    ] + args.args

    try:
        with Popen(
            command, stdout=sys.stderr, stderr=PIPE, bufsize=2, universal_newlines=True
        ) as p:
            training_log.info(f'{" ".join(command[2:])}')
            training_log.info('-' * 63)
            training_log.info(f'Initiating \'{run_id}\'')

            start_time = time.perf_counter()
            output_time_remaining = const.GA_INFERENCE not in args.args

            for line in p.stderr:
                # Print intercepted line so it is visible in the console
                line = line.rstrip()
                print(line)
                training_info.update_from_training_output(line)

                if output_time_remaining and training_info.line_has_time_elapsed(line):
                    print(
                        f'Estimated time remaining: {common.get_human_readable_duration(training_info.time_remaining)}'
                    )
    except KeyboardInterrupt:
        training_log.warning('KeyboardInterrupt, aborting')
        raise
    finally:
        training_log.info('-' * 63)
        if args.export_path:
            export_brains(training_info.exported_brains, Path(args.export_path))

        end_time = time.perf_counter()
        training_duration = common.get_human_readable_duration(end_time - start_time)
        training_log.info(f'Training run \'{run_id}\' ended after {training_duration}')

        if p.returncode == 0:
            training_log.info('Training completed successfully')
        else:
            training_log.warning(
                f'Training was not completed successfully (error code {p.returncode})'
            )

        training_log.info(f'Final Mean Reward: {training_info.mean_reward}')
        training_log.info('-' * 63)
        logging.shutdown()
def gocommand(self):
    global listen
    p = Popen('masscan -p80,21 -iL masin.txt --rate=300 -oL masout.txt',
              stdout=PIPE, stderr=STDOUT, shell=True)
    while p.poll() is None:
        line = p.stdout.readline()
        listen.emit([line])
    p = Popen('awk \'{ print $4 }\' masout.txt > nmapin.txt',
              stdout=PIPE, stderr=STDOUT, shell=True)
    while p.poll() is None:
        line = p.stdout.readline()
        listen.emit([line])
    p = Popen('nmap -p 80,21 --script "http-title","ftp-anon" -iL nmapin.txt -oN output.txt',
              stdout=PIPE, stderr=STDOUT, shell=True)
    while p.poll() is None:
        line = p.stdout.readline()
        listen.emit([line])
    p = Popen('python3 nparse.py', stdout=PIPE, stderr=STDOUT, shell=True)
    while p.poll() is None:
        line = p.stdout.readline()
        listen.emit([line])
def input(cmd):
    """Run `cmd` and return its captured stdout as a single string."""
    proc = Popen(cmd, stdout=PIPE)
    res = ''
    for line in iter(proc.stdout.readline, ''):
        res = res + line + '\n'
    return res
def ocr_tesseract_hocr(
        input_file,
        output_file,
        log,
        pdfinfo,
        pdfinfo_lock):
    pageinfo = get_pageinfo(input_file, pdfinfo, pdfinfo_lock)
    badxml = os.path.splitext(output_file)[0] + '.badxml'
    args_tesseract = [
        'tesseract',
        '-l', '+'.join(options.language),
        input_file,
        badxml,
        'hocr'
    ] + options.tesseract_config
    p = Popen(args_tesseract, close_fds=True, stdout=PIPE, stderr=PIPE,
              universal_newlines=True)
    try:
        stdout, stderr = p.communicate(timeout=options.tesseract_timeout)
    except TimeoutExpired:
        p.kill()
        stdout, stderr = p.communicate()
        # Generate a HOCR file with no recognized text if tesseract times out
        # Temporary workaround to hocrTransform not being able to function if
        # it does not have a valid hOCR file.
        with open(output_file, 'w', encoding="utf-8") as f:
            f.write(tesseract.HOCR_TEMPLATE.format(
                pageinfo['width_pixels'],
                pageinfo['height_pixels']))
    else:
        if stdout:
            log.info(stdout)
        if stderr:
            log.error(stderr)

        if p.returncode != 0:
            raise CalledProcessError(p.returncode, args_tesseract)

        if os.path.exists(badxml + '.html'):
            # Tesseract 3.02 appends suffix ".html" on its own (.badxml.html)
            shutil.move(badxml + '.html', badxml)
        elif os.path.exists(badxml + '.hocr'):
            # Tesseract 3.03 appends suffix ".hocr" on its own (.badxml.hocr)
            shutil.move(badxml + '.hocr', badxml)

        # Tesseract 3.03 inserts source filename into hocr file without
        # escaping it, creating invalid XML and breaking the parser.
        # As a workaround, rewrite the hocr file, replacing the filename
        # with a space.  Don't know if Tesseract 3.02 does the same.
        regex_nested_single_quotes = re.compile(
            r"""title='image "([^"]*)";""")
        with open(badxml, mode='r', encoding='utf-8') as f_in, \
                open(output_file, mode='w', encoding='utf-8') as f_out:
            for line in f_in:
                line = regex_nested_single_quotes.sub(
                    r"""title='image " ";""", line)
                f_out.write(line)
# open log file
log_file_name = base_name + ".log"
if os.path.exists(log_file_name):
    os.remove(log_file_name)
logfile = open(log_file_name, 'w')

# remove old output files, if any
files_to_remove = base_name + ".e"
for file in os.listdir(os.getcwd()):
    if file in files_to_remove:
        os.remove(file)

# run Peridigm
command = ["../../../../src/Peridigm", "../" + base_name + ".xml"]
p = Popen(command, stdout=logfile, stderr=logfile)
return_code = p.wait()
if return_code != 0:
    result = return_code

# compare output files against gold files
command = ["../../../../scripts/exodiff",
           "-stat",
           "-f",
           "../" + base_name + ".comp",
           base_name + ".e",
           "../" + base_name + "_gold.e"]
p = Popen(command, stdout=logfile, stderr=logfile)
return_code = p.wait()
if return_code != 0:
    result = return_code
from subprocess import Popen, PIPE

for ip in range(1, 10):
    ipAddress = "192.168.1." + str(ip)
    print("Scanning...", ipAddress)
    subprocess = Popen(['/bin/ping', '-c', '1', ipAddress],
                       stdin=PIPE, stdout=PIPE, stderr=PIPE,
                       universal_newlines=True)  # decode output to str
    stdout, stderr = subprocess.communicate(input=None)
    if "bytes from " in stdout:
        print("The Ip Address %s has responded with a ECHO_REPLY!" % (stdout.split()[1]))
        with open("ips.txt", "a") as myfile:
            myfile.write(stdout.split()[1] + '\n')
def __module_avail_output(self):
    avail_command = [self.module_dependency_resolver.modulecmd, 'sh', 'avail']
    # the avail listing is read from stderr, which is why that stream is captured
    return Popen(avail_command, stderr=PIPE).communicate()[1]
def get_hp_health(self):
    try:
        fans = Popen(['hpasmcli', '-s', 'show fan'],
                     stdout=PIPE).communicate()[0].replace('\n', '<br>')
        fans = ' '.join(fans.split()).replace(' ', ' | ')
        temperature = Popen(['hplog', '-t'],
                            stdout=PIPE).communicate()[0].replace('\n', '<br>')
        temperature = ' '.join(temperature.split()).replace(
            'Normal', '| <strong>Normal</strong> |')
        temperature = temperature.replace('Basic Sensor', '| Basic Sensor |')
        power_suply = Popen(['hpasmcli', '-s', 'show powersupply'],
                            stdout=PIPE).communicate()[0].replace('\n', '<br>')
        dimms = Popen(['hpasmcli', '-s', 'show dimm'],
                      stdout=PIPE).communicate()[0].replace('\n', '<br>')
        server_desc = Popen(['hpasmcli', '-s', 'show server'],
                            stdout=PIPE).communicate()[0].replace('\n', '<br>')
        #raid = Popen('/root/hp.sh', stdout=PIPE).communicate()[0].replace('\n', '<br>')
        raid = Popen(['hpacucli', 'controller', 'slot=0', 'ld', 'all',
                      'show', 'detail'],
                     stdout=PIPE).communicate()[0].replace('\n', '<br>')
        raid += Popen(['hpacucli', 'controller', 'slot=0', 'pd', 'all',
                       'show', 'detail'],
                      stdout=PIPE).communicate()[0].replace('\n', '<br>')
        #raid = Popen('hpacucli ctrl slot=0 pd all show detail', stdin=PIPE, shell=False, stdout=PIPE).communicate()[0].replace('\n', '<br>')
        #print raid
        #temperature = Popen('hpasmcli -s "show temp" ', stdin=PIPE, shell=False, stdout=PIPE).communicate()[0]
        #power_suply = Popen('hpasmcli -s "show powersuply" ', stdin=PIPE, shell=False, stdout=PIPE).communicate()[0]
        #print fans
        return fans, temperature, power_suply, dimms, server_desc, raid
    except:
        #self.logger.warning("hp_health error...")
        return ("No disponible", "No disponible", "No disponible",
                "No disponible", "No disponible", "No disponible")
def tag_sents(self, sentences):
    """
    Applies the tag method over a list of sentences. This method will return a
    list of dictionaries. Every dictionary will contain a word with its
    calculated annotations/tags.
    """
    encoding = self._encoding

    if not path.isfile(self.executable(self._path)):
        raise OSError("Senna executable expected at %s but not found" %
                      self.executable(self._path))

    # Build the senna command to run the tagger
    _senna_cmd = [self.executable(self._path), '-path', self._path,
                  '-usrtokens', '-iobtags']
    _senna_cmd.extend(['-' + op for op in self.operations])

    # Serialize the actual sentences to a temporary string
    _input = '\n'.join((' '.join(x) for x in sentences)) + '\n'
    if isinstance(_input, text_type) and encoding:
        _input = _input.encode(encoding)

    # Run the tagger and get the output
    p = Popen(_senna_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = p.communicate(input=_input)
    senna_output = stdout

    # Check the return code.
    if p.returncode != 0:
        raise RuntimeError('Senna command failed! Details: %s' % stderr)

    if encoding:
        senna_output = stdout.decode(encoding)

    # Output the tagged sentences
    map_ = self._map()
    tagged_sentences = [[]]
    sentence_index = 0
    token_index = 0
    for tagged_word in senna_output.strip().split("\n"):
        if not tagged_word:
            tagged_sentences.append([])
            sentence_index += 1
            token_index = 0
            continue
        tags = tagged_word.split('\t')
        result = {}
        for tag in map_:
            result[tag] = tags[map_[tag]].strip()
        try:
            result['word'] = sentences[sentence_index][token_index]
        except IndexError:
            raise IndexError(
                "Misalignment error occurred at sentence number %d. Possible reason"
                " is that the sentence size exceeded the maximum size. Check the "
                "documentation of Senna class for more information."
                % sentence_index)
        tagged_sentences[-1].append(result)
        token_index += 1
    return tagged_sentences
import sys
from subprocess import Popen
from itertools import islice
from time import sleep

if __name__ == '__main__':
    algo_prefix, start_task, end_task, parallelism = \
        sys.argv[1], int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4])

    buildprocess = Popen("dotnet build -c Release console-runner/console-runner.csproj", shell=True)
    buildprocess.wait()
    if buildprocess.returncode != 0:
        exit(buildprocess.returncode)

    cmd = "dotnet console-runner/bin/Release/netcoreapp2.2/console-runner.dll solve -s %s -p %d"
    processes = (Popen(cmd % (algo_prefix, task_num), shell=True)
                 for task_num in range(start_task, end_task + 1))
    running_processes = list(islice(processes, parallelism))  # start new processes
    while running_processes:
        for i, process in enumerate(running_processes):
            if process.poll() is not None:  # the process has finished
                running_processes[i] = next(processes, None)  # start new process
                if running_processes[i] is None:  # no new processes
                    del running_processes[i]
                    break
        sleep(1)
src_crunch_name = unsuffixed(src_dds_name) + CRUNCH_OUTPUT_SUFFIX

# Preload/embed the .crn version instead of the .dds version, but use the
# .dds suffix for the target file in the virtual FS.
file_['srcpath'] = src_crunch_name

try:
    # Do not crunch if crunched version exists and is more recent than dds source
    crunch_time = os.stat(src_crunch_name).st_mtime
    dds_time = os.stat(src_dds_name).st_mtime
    if dds_time < crunch_time:
        continue
except:
    pass  # if one of them does not exist, continue on

# guess at format. this lets us tell crunch to not try to be clever and use
# odd formats like DXT5_AGBR
try:
    format = Popen(['file', file_['srcpath']], stdout=PIPE).communicate()[0]
    if 'DXT5' in format:
        format = ['-dxt5']
    elif 'DXT1' in format:
        format = ['-dxt1']
    else:
        raise Exception('unknown format')
except:
    format = []

Popen([CRUNCH, '-outsamedir', '-file', src_dds_name, '-quality', crunch] + format,
      stdout=sys.stderr).communicate()
#if not os.path.exists(os.path.basename(crunch_name)):
#  print >> sys.stderr, 'Failed to crunch, perhaps a weird dxt format? Looking for a source PNG for the DDS'
#  Popen([CRUNCH, '-file', unsuffixed(file_['srcpath']) + '.png', '-quality', crunch] + format, stdout=sys.stderr).communicate()
assert os.path.exists(src_crunch_name), 'crunch failed to generate output'

# prepend the dds header
crunched = open(src_crunch_name, 'rb').read()
    args.extend(additional_args)
else:
    args.extend(additional_args.split())
args.insert(0, command)  # Command has to go first
args.append(host)  # Host should be last

if password:
    # Create a temporary script to use with SSH_ASKPASS
    temp = tempfile.NamedTemporaryFile(delete=False)
    os.chmod(temp.name, 0o700)
    temp.write(('#!/bin/sh\necho "{0}"\n'.format(password)).encode('utf-8'))
    temp.close()
    env['SSH_ASKPASS'] = temp.name
    env['DISPLAY'] = ':9999'  # TODO: Get this using the user's actual X11
    # This removes the temporary file in a timely manner
    from subprocess import Popen
    Popen("sleep 15 && /bin/rm -f %s" % temp.name, shell=True)
    # 15 seconds should be enough even for slow connections/servers
    # It's a tradeoff: Lower number, more secure.  Higher number, less
    # likely to fail

script_path = None
if 'GO_TERM' in os.environ.keys():
    term = os.environ['GO_TERM']
    location = os.environ['GO_LOCATION']
    if socket:
        # Emit our special optional escape sequence to tell ssh.py the path
        # to the SSH socket
        print("\x1b]_;ssh|set;ssh_socket;{0}\007".format(socket))
    if 'GO_SESSION_DIR' in os.environ.keys():
        # Save a file indicating our session is attached to GO_TERM
        ssh_session = 'ssh:%s:%s:%s@%s:%s' % (
            location, term, user, host, port)
acks_second = []
#runRx = True
#DELAYS = range(min(int(START),int(STOP)),max(int(STOP),int(START))+int(STEP),int(STEP))
#for gain in STEPS:
import numpy as np

print(STEPS)
print("channel " + str(channel))
#for gain in np.arange(-4.0, 0.1, 0.25):
for vary in STEPS:
    # gain = START
    # while True:
    #gain = vary
    print("CURRENT STEP " + str(vary))
    #gain = 10  # relative 5 translates to 0.5 or 50% power
    if exp == 'rx2rx':
        subp = Popen(['python3', 'zig_rx2rx_time.py',
                      '-t {}'.format(gain),
                      '-f {}'.format(frequency)])  #, stdout=PIPE, stderr=PIPE)
        gnuradio_set_vars(tx_gain=gain)
        print('Gain:', gnuradio_get_vars('tx_gain'))
        test(file, length, vary, second=True)
    if exp == 'capt':
        subp = Popen(['python3', 'zig_rx2rx_agc.py',
                      '-t {}'.format(gain),
                      '-f {}'.format(frequency)])
        gnuradio_set_vars(gain_first=GAIN_FIRST)
        gnuradio_set_vars(gain_second=GAIN_SECOND)
        gnuradio_set_vars(channel=channel)
        print('Gain first:', gnuradio_get_vars('gain_first'))
        print('Gain second:', gnuradio_get_vars('gain_second'))
        print('Channel:', gnuradio_get_vars('channel'))
        test(file, length, vary, second=True)
    elif exp == 'sens':
        subp = Popen(['python3', 'zig_sens.py',
                      '-t {}'.format(gain),
                      '-f {}'.format(frequency)])
        gnuradio_set_vars(tx_gain=vary)
def worker():
    """ Run the huey worker """
    comm = ['huey_consumer.py', '-w', '4', '--logfile', 'tasks.log',
            'base.async.tasks.huey']
    Popen(comm).communicate()