def getPkgAct(self, path):
    """Use aapt to fetch the package name and main (launchable) activity
    from an apk file.

    :param path: filesystem path of the .apk to inspect
    :return: (package, activity) strings exactly as emitted by the
             aapt/awk pipeline (may include a trailing newline)
    """
    def _badging_field(tag):
        # Run `aapt dump badging` and pull the quoted name= value out of
        # the line matching *tag* — same awk pipeline as before, shared
        # between the two lookups.
        cmd = (config.aapt + ' dump badging ' + path +
               ' | awk -F" " \'/' + tag + '/ {print $2}\''
               '|awk -F"\'" \'/name=/ {print $2}\'')
        pipe = Popen(cmd, shell=True, stdout=PIPE).stdout
        try:
            return pipe.read()
        finally:
            pipe.close()  # fix: pipe handles were previously leaked

    pkg = _badging_field('package')
    act = _badging_field('launchable-activity')
    return pkg, act
def __init__(self, setupsDir=DEFAULT_SETUPS_DIR):
    """Bootstrap the UPS environment: source setups.sh in a shell and
    import the resulting UPS_DIR/PRODUCTS/SETUP_UPS/PYTHONPATH values
    into os.environ by exec'ing generated assignment statements."""
    # initial setup of ups itself:
    os.environ['UPS_SHELL'] = 'sh'
    # initial setup of ups itself:
    os.environ['UPS_SHELL'] = 'sh'
    # NOTE(review): the assignment above appears twice — harmless, but
    # confirm before removing.
    if POPEN_AVAILABLE:
        # The shell echoes python assignment statements, one per UPS
        # variable; we capture them from stdout and exec them below.
        f = Popen('. %s/setups.sh; ' % setupsDir + \
            'echo os.environ\\[\\"UPS_DIR\\"\\]=\\"${UPS_DIR}\\"; ' + \
            'echo os.environ\\[\\"PRODUCTS\\"\\]=\\"${PRODUCTS}\\";' + \
            'echo os.environ\\[\\"SETUP_UPS\\"\\]=\\"${SETUP_UPS}\\";' + \
            'echo os.environ\\[\\"PYTHONPATH\\"\\]=\\"${PYTHONPATH}\\";',
            shell=True, stdout=PIPE).stdout
    else:
        # Fallback for interpreters without subprocess.Popen.
        f = os.popen('. %s/setups.sh; ' % setupsDir + \
            'echo os.environ\\[\\"UPS_DIR\\"\\]=\\"${UPS_DIR}\\"; ' + \
            'echo os.environ\\[\\"PRODUCTS\\"\\]=\\"${PRODUCTS}\\";' + \
            'echo os.environ\\[\\"SETUP_UPS\\"\\]=\\"${SETUP_UPS}\\";' + \
            'echo os.environ\\[\\"PYTHONPATH\\"\\]=\\"${PYTHONPATH}\\";')
    # exec of shell-derived text (Python 2 statement form); trusted
    # local shell output is assumed here.
    exec f.read()
    f.close()
    # we need to initialize the following so that we can
    # make the correct changes to sys.path later when products
    # we setup modify PYTHONPATH
    self._pythonPath = os.environ.get('PYTHONPATH', '')
    self._sysPath = sys.path
    (self._internalSysPathPrepend, self._internalSysPathAppend) = self._getInitialSyspathElements()
def checkEnclosureTemp(self):
    """Read enclosure temperature (and humidity for DHT sensors) via the
    configured helper scripts, push the readings to the plugin frontend,
    and run the heater control loop.

    Model '1820' (DS18B20 1-wire) needs no pin argument and reports no
    humidity; other models are queried with model + pin.
    """
    model = str(self._settings.get(["dhtModel"]))  # accepts int 1820 or str '1820'
    if model == '1820':
        stdout = Popen("sudo " + self._settings.get(["getTempScript"]) + " " + model,
                       shell=True, stdout=PIPE).stdout
    else:
        stdout = Popen("sudo " + self._settings.get(["getTempScript"]) + " " + model +
                       " " + str(self._settings.get(["dhtPin"])),
                       shell=True, stdout=PIPE).stdout
    sTemp = stdout.read()
    stdout.close()  # fix: pipe handle was leaked
    # fix: str.replace returns a new string; the old code discarded it
    sTemp = sTemp.replace(" ", "")
    if sTemp.find("Failed") != -1:
        self._logger.info("Failed to read Temperature")
    else:
        #self._logger.info(sTemp)
        self.enclosureCurrentTemperature = self.toFloat(sTemp)
        #self._logger.info("enclosureCurrentTemperature is: %s",self.enclosureCurrentTemperature)
    if model != '1820':
        stdout = Popen("sudo " + self._settings.get(["getHumiScript"]) + " " + model +
                       " " + str(self._settings.get(["dhtPin"])),
                       shell=True, stdout=PIPE).stdout
        sHum = stdout.read()
        stdout.close()
        sHum = sHum.replace(" ", "")  # fix: keep the stripped result
        if sHum.find("Failed") != -1:
            self._logger.info("Failed to read Humidity")
        else:
            self._logger.info(sHum)
            self.enclosureCurrentHumidity = self.toFloat(sHum)
    self._plugin_manager.send_plugin_message(self._identifier,
        dict(enclosuretemp=self.enclosureCurrentTemperature,
             enclosureHumidity=self.enclosureCurrentHumidity))
    self.heaterHandler()
def dataLogging():
    """Log one timestamped weight/temperature sample to tempdata.txt,
    give LCD feedback over i2c via bw_tool, then chain back into the
    menu script and exit this process."""
    #Open Log File
    f=open('tempdata.txt','a')
    now = datetime.datetime.now()
    timestamp = now.strftime("%Y/%m/%d,%H:%M:%S")
    cowName = cow()
    config = ConfigParser.ConfigParser()
    config.read("/home/pi/config.ini")
    # NOTE(review): cowName and config are read but never used below —
    # confirm whether they are leftovers.
    pipe1 = Popen('sudo ./2readADC', shell=True, stdout=PIPE).stdout
    weight = pipe1.read()
    # .communicate()
    # exit_code = weight.wait()
    cmd = ['sudo sh /home/pi/showTemp.sh']
    pipe2 = Popen(cmd, shell=True, stdout=PIPE).stdout
    temperature = pipe2.read()
    # weight/temperature already carry their own separators from the scripts
    outstring = str(timestamp)+","+weight+ ","+temperature+"\n"
    f.write(outstring)
    f.close()
    print outstring
    # LCD feedback: clear, set cursor, print "Recorded"
    Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -w 10:0', shell=True)
    Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -r 17 -v 0', shell=True)
    Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -t "Recorded"', shell=True)
    time.sleep(2)
    # hand control back to the menu, then terminate this script
    Popen('sudo python /home/pi/menu.py dlTopSelect.mnu', shell=True)
    sys.exit()
def runMatch(self,playerOne, playerTwo,gameSeed) :
    """Run one QtSpimbot match between two player files and return the
    winner string printed by the simulator.

    Falls back to manual_override() on Ctrl-C or when the simulator exits
    without printing a winner; returns "#fail#" after killing QtSpimbot
    on an Alarm timeout."""
    try :
        inline= Popen("./QtSpimbot -file "+playerOne+" -file2 "+playerTwo + " -randomseed "+gameSeed+ " -randommap -tournament -run -exit_when_done -maponly -quiet ",\
            stdout=PIPE, shell=True).stdout
        string = "not"
        # read simulator output line by line until EOF ('' from readline)
        while(not (string == '')) :
            string = inline.readline()
            if string[:7] == "winner:" :
                return string[8:-1]  # strip "winner: " prefix and trailing newline
        print "\nerror, What? This should not be so? Did you quit qtSpim?"
        return self.manual_override(playerOne,playerTwo,gameSeed)
    except KeyboardInterrupt:
        return self.manual_override(playerOne,playerTwo,gameSeed)
    except Alarm:
        # timeout signal raised elsewhere: kill any stuck simulator
        print "timeOut"
        killerror= Popen("killall QtSpimbot", stdout=PIPE, shell=True).stdout
        print killerror.read()
        time.sleep(1)
        return "#fail#"
def check(self, logger, agentConfig):
    """Return a dictionary of metrics
    Or False to indicate that there are no data to report

    Runs `nodetool info`, `cfstats` and `tpstats` against the configured
    host/port and merges the parsed results into one dict."""
    logger.debug('Cassandra: start')
    try:
        # How do we get to nodetool
        nodetool = agentConfig.get("cassandra_nodetool", None)
        if nodetool is None:
            return False
        else:
            if not os.path.exists(nodetool) or not os.path.isfile(nodetool):
                logger.warn("Cassandra's nodetool cannot be found at %s" % (nodetool,))
                return False
        # Connect to what?
        cassandra_host = agentConfig.get("cassandra_host", None)
        if cassandra_host is None:
            if nodetool is not None:
                cassandra_host = "localhost"
                logger.info("Nodetool is going to assume %s" % (cassandra_host))
            else:
                return False
        # A specific port, assume 8080 if none is given
        cassandra_port = agentConfig.get("cassandra_port", None)
        if cassandra_port is None:
            if nodetool is not None:
                cassandra_port = 8080
                logger.info("Nodetool is going to assume %s" % (cassandra_port))
            else:
                return False
        nodetool_cmd = "%s -h %s -p %s" % (nodetool, cassandra_host, cassandra_port)
        logger.debug("Connecting to cassandra with: %s" % (nodetool_cmd,))
        bufsize = -1
        results = {}
        # nodetool info
        pipe = Popen("%s %s" % (nodetool_cmd, "info"), shell=True, universal_newlines=True, bufsize=bufsize, stdout=PIPE, stderr=None).stdout
        self._parseInfo(pipe.read(), results, logger)
        logger.debug("Cassandra info: %s" % results)
        pipe.close()
        # nodetool cfstats
        pipe = Popen("%s %s" % (nodetool_cmd, "cfstats"), shell=True, universal_newlines=True, bufsize=bufsize, stdout=PIPE, stderr=None).stdout
        self._parseCfstats(pipe.read(), results)
        pipe.close()
        # nodetool tpstats
        pipe = Popen("%s %s" % (nodetool_cmd, "tpstats"), shell=True, universal_newlines=True, bufsize=bufsize, stdout=PIPE, stderr=None).stdout
        self._parseTpstats(pipe.read(), results)
        pipe.close()
        return results
    except Exception, e:
        # best-effort check: any failure means "no data this cycle"
        logger.exception(e)
        return False
def get_versions():
    """ Try to find out the versions of gcc, ld and dllwrap.
        If not possible it returns None for it.

    :return: (gcc_version, ld_version, dllwrap_version) — each a
             LooseVersion or None when the tool is missing/silent.
    """
    from distutils.version import LooseVersion
    from distutils.spawn import find_executable
    import re

    def _probe(cmd, pattern):
        # Run *cmd*, scan its output with *pattern*, return LooseVersion
        # of group(1) or None.
        out = os.popen(cmd, 'r')
        out_string = out.read()
        out.close()
        result = re.search(pattern, out_string)
        if result:
            return LooseVersion(result.group(1))
        return None

    gcc_exe = os.environ.get('CC') or find_executable('gcc')
    ld_exe = find_executable('ld')
    if gcc_exe:
        # fix: the old code concatenated gcc_exe unconditionally, raising
        # TypeError when gcc was absent (gcc_exe is None).
        out = Popen(gcc_exe + ' --print-prog-name ld', shell=True, stdout=PIPE).stdout
        try:
            data = out.read()
            if not isinstance(data, str):
                data = data.decode()  # fix: Py3 pipe yields bytes; str(b'..') was junk
            if data.strip():
                ld_exe = data.strip()
        finally:
            out.close()
    gcc_version = _probe(gcc_exe + ' -dumpversion', r'(\d+\.\d+(\.\d+)*)') if gcc_exe else None
    ld_version = _probe(ld_exe + ' -v', r'(\d+\.\d+(\.\d+)*)') if ld_exe else None
    dllwrap_exe = os.environ.get('DLLWRAP') or find_executable('dllwrap')
    dllwrap_version = _probe(dllwrap_exe + ' --version', r' (\d+\.\d+(\.\d+)*)') if dllwrap_exe else None
    return (gcc_version, ld_version, dllwrap_version)
def git_version(path):
    """Return the abbreviated hash of the newest commit in the git
    repository rooted at *path* (path + '.git' must be the repo's .git
    directory)."""
    from subprocess import Popen, PIPE, STDOUT
    git_cmd = 'git --git-dir=' + path + '.git log --pretty=format:%h -n1'
    pipe = Popen(git_cmd, shell=True, stdout=PIPE).stdout
    try:
        return pipe.read()
    finally:
        pipe.close()
def getSimilarWords(word):
    """Query dictionary.reference.com for *word* through a generated
    wget batch file and return a dict mapping each reported word form
    to its part of speech (empty when the page yields no syllable data)."""
    # Generate a one-shot batch file that wgets the dictionary page and
    # keeps only the data-syllable lines.
    batch = open("C:\IRProject\dictionary.bat", "w")
    batch.write("@ECHO OFF")
    batch.write("\n")
    batch.write(
        "C:\IRProject\\bin\wget.exe -qO- http://dictionary.reference.com/browse/"
        + word
        + '?s=t | findstr "data-syllable"'
    )
    batch.close()
    pipe = Popen("C:\IRProject\dictionary.bat", shell=True, stdout=PIPE).stdout
    fetched = pipe.read()
    # Persist the raw response, then read it back line by line.
    sink = open("C:\IRProject\dictionary.txt", "w")
    sink.write(fetched)
    sink.close()
    src = open("C:\IRProject\dictionary.txt")
    lines = src.readlines()
    src.close()
    word_POS = {}
    # print len(lines)
    if len(lines) < 2:
        return word_POS
    tokens = re.split("<|>|=", lines[1].replace(" ", ""))
    for idx, token in enumerate(tokens):
        if token == '"secondary-bf"data-syllable':
            # token layout: word form 2 slots ahead, POS 7 slots ahead
            word_POS[tokens[idx + 2].split(",")[0]] = tokens[idx + 7]
    return word_POS
def dependencies(sentence):
    """Run the Stanford parser (god.bat) on *sentence* and populate the
    global `depen` map: word index -> [word, {related index: relation}].
    Also fills the `sentence_POS` and `negations` globals as a side
    effect. Returns {} when the parser output cannot be aligned."""
    # hand the sentence to the batch script through a temp file
    os.popen("echo " + sentence + " > C:\IRProject\stanfordtemp.txt")
    stdout = Popen("C:\IRProject\god.bat", shell=True, stdout=PIPE).stdout
    output = stdout.read()
    output1 = re.findall(r"[\w']+", output)
    # print output1
    sentence_split = sentence.split(".")[0].split()
    count1 = 0
    count = 0
    # align each sentence word with its first occurrence in the token stream
    for s in sentence_split:
        depen[count1] = [s, {}]
        for o in range(count, len(output1)):
            if output1[o] == s:
                break
            count = count + 1
        count1 = count1 + 1
        count = count + 1
    # tokens before `count` form the POS-tagged section: tag precedes word
    for o in range(0, count):
        if output1[o] in sentence_split:
            sentence_POS[output1[o]] = output1[o - 1]
    # NOTE(review): remaining tokens appear to come in 5-token dependency
    # records (relation, head, headIdx, dependent, depIdx) — confirm
    # against actual parser output.
    for o in xrange(count, len(output1) - 1, 5):
        if output1[o] != "root":
            if output1[o] == "neg":
                negations[int(output1[o + 2]) - 1] = output1[o + 1]
            try:
                depen.get(int(output1[o + 2]) - 1)[1][int(output1[o + 4]) - 1] = output1[o + 3]
                depen.get(int(output1[o + 4]) - 1)[1][int(output1[o + 2]) - 1] = output1[o + 1]
            except:
                # misaligned indices -> give up and signal failure
                return {}
    return depen
def check_noun(sentence):
    """Scan the parser output for *sentence* and return a map
    sentence position -> [noun, adjectives] for NN/NNS nouns present in
    list_of_related_terms and for all NNP/NNPS proper nouns."""
    os.popen("echo " + sentence + " > C:\IRProject\stanfordtemp.txt")
    stdout = Popen("C:\IRProject\god.bat", shell=True, stdout=PIPE).stdout
    output = stdout.read()
    output1 = re.findall(r"[\w']+", output)
    words = sentence.split(".")[0].split()
    count = 0
    sent_position = 0
    answer = {}
    for s in words:
        # advance through the token stream to this word's occurrence;
        # the token right before the word is its POS tag
        for o in range(count + 1, len(output1)):
            if output1[o] != "ROOT" and s == output1[o]:
                if output1[o - 1] == "NN" or output1[o - 1] == "NNS":
                    if s in list_of_related_terms:
                        adj = getNounAdj(s, sent_position)
                        answer[sent_position] = [s, adj]
                if output1[o - 1] == "NNP" or output1[o - 1] == "NNPS":
                    adj = getNounAdj(s, sent_position)
                    answer[sent_position] = [s, adj]
                count = count + 1
                break
            count = count + 1
        count = count + 1
        sent_position = sent_position + 1
    return answer
def checkSocat(self):
    """Return 1 when a socat process shows up in `ps caux`, else 0."""
    pipe = Popen('(ps caux | grep socat)', shell=True, stdout=PIPE).stdout
    listing = str(pipe.read()).replace("b''", '')
    return 1 if listing != '' else 0
def checkTor(self):
    """Return 1 when a process mentioning torrc is running, else 0."""
    pipe = Popen('ps aux | grep torrc | grep -v grep', shell=True, stdout=PIPE).stdout
    listing = str(pipe.read()).replace("b''", '')
    return 0 if listing == '' else 1
def executeCommandAndArguments(caa, dumpfilepath):
    """Run the command *caa* and dump its stdout into *dumpfilepath*.

    :param caa: argv-style list, executed without a shell
    :param dumpfilepath: file to (over)write with the raw stdout
    """
    result = Popen(caa, stdout=PIPE).stdout
    try:
        # 'wb' matches the bytes produced by the pipe (fix: the old
        # text-mode open raised TypeError on Python 3) and the context
        # manager closes the file even if the write fails.
        with open(dumpfilepath, 'wb') as dumpfile:
            dumpfile.write(result.read())
    finally:
        result.close()
def _inhaleresults(self, cmd):
    """Run a UPS setup command, source the temp setup script it names,
    and exec the resulting os.environ assignments into this process.

    *cmd* prints the path of a generated setup script on stdout;
    "/dev/null" signals failure, with the reason on stderr (raised as
    upsException)."""
    if POPEN_AVAILABLE:
        p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                  close_fds=True)
        (stdin, stdout, stderr) = (p.stdin, p.stdout, p.stderr)
    else:
        (stdin,stdout,stderr) = os.popen3(cmd)
    try:
        filename = stdout.read()
        filename = filename[0:-1]  # drop trailing newline
        if (filename == "/dev/null"):
            msg = stderr.read()
            raise upsException(msg)
    finally:
        stdin.close()
        stdout.close()
        stderr.close()
    cutHere = '--------------cut here-------------'
    # append python code that prints our env mutations after the marker
    setup = open(filename, 'a')
    setup.write(self._getNewPythonEnv(cutHere))
    setup.close()
    if POPEN_AVAILABLE:
        f = Popen("/bin/sh %s" % filename, shell=True, stdout=PIPE).stdout
    else:
        f = os.popen("/bin/sh %s" % filename)
    c1 = f.read()
    f.close()
    # everything after the marker is python generated by _getNewPythonEnv
    (realUpsStuff, ourOsEnvironmentStuff) = re.split('.*%s' % cutHere, c1)
    #print("ourOsEnvironmentStuff = %s" % ourOsEnvironmentStuff)
    exec ourOsEnvironmentStuff
def run(self, edit): selections = self.view.sel() # For now we don't try to balance multiple or non-empty selections, just insert as usual if len(selections) > 1: for selection in reversed(selections): self.insert(edit, selection) return selection = selections[0] if not selection.empty(): self.insert(edit, selection) return point = selection.end() line = self.view.line(point) os.environ["TM_CURRENT_LINE"] = self.view.substr(line).encode("utf-8") os.environ["TM_LINE_INDEX"] = unicode(self.view.rowcol(point)[1]) os.environ["TM_SUPPORT_PATH"] = os.getcwd().encode("utf-8") pipe = Popen(["ruby", os.path.join(self.package_path(), self.PARSER_PATH)], shell=False, stdout=PIPE, stderr=STDOUT).stdout snippet = pipe.read() pipe.close() self.view.erase(edit, line) self.view.run_command("insert_snippet", {"contents": unicode(snippet, "utf-8")})
def applicationCollectMac(): #Developed the OS X function first because I have a Mac!
    """Collect installed-application info on OS X by parsing the XML
    plist emitted by `system_profiler SPApplicationsDataType`.

    :return: list of dicts, one per application entry
    """
    appArray = []
    # Execute system profiler
    appCollect = Popen(["system_profiler", "-detailLevel", "full", "SPApplicationsDataType", "-xml"], stdout = PIPE).communicate()[0]
    # appCollect = open("platform_sample_files/osx_sample_system_profiler_output.xml")  # Run sample profiler output as the system_profiler command is a little slow
    # fix: communicate() already returns the output buffer; the old code
    # called .read() on it, which raised AttributeError on every run.
    xmlApp = appCollect
    xmlTree = etree.parse(StringIO(xmlApp))
    xmlContext = etree.iterparse(StringIO(xmlApp))
    xmlRoot = xmlTree.getroot()
    for eachItem in xmlRoot:
        # This cascade isn't pretty and needs cleanup!
        for eachItem in eachItem:
            for eachItem in eachItem:
                for eachItem in eachItem:
                    if eachItem.tag == "dict":
                        appDict = {}
                        for eachItem in eachItem:
                            if eachItem.tag == "key":
                                tagKey = eachItem.text
                            else:
                                tagText = eachItem.text
                            try:
                                # bare-name guard: tagKey/tagText may be
                                # unset on the first iteration
                                if tagText and tagKey:
                                    appDict[str(tagKey)]= str(tagText)
                            except:
                                pass
                        appArray.append(appDict)
    return appArray
def _get_fields(self, msg_path, length=50):
    """Parse `mu view` output for the message at *msg_path* into a dict
    of lowercased header name -> value, truncating values longer than
    *length* with an ellipsis."""
    mu_cmd = [
        'mu', 'view', msg_path, '--summary-len=5',
        '--muhome=%s' % config.HIPFLASK_FOLDERS['mu']
    ]
    pipe = Popen(mu_cmd, stdout=PIPE).stdout
    raw_lines = pipe.read().splitlines()
    pipe.close()
    message = {}
    for raw in raw_lines:
        colon = raw.find(':')
        if colon == -1:
            continue
        # header name before the colon; value starts after ": "
        header = raw[0:colon].lower()
        body = raw[colon + 2:]
        message[header] = body if len(body) <= length else body[:length] + '...'
    return message
def collect(self, parms):
    """Collect the process list (user, pid, command) via `ps aux` into
    self.cur_data: a parsed {command: pid} map, the raw text, and an
    md5 checksum of the raw text.

    :param parms: unused here, kept for interface compatibility
    :return: True on success, False on any failure (best-effort)
    """
    try:
        #- get the data
        collected = ''
        ps_cmd = "ps aux| awk '{print $1,$2,$11}'"
        from subprocess import Popen, PIPE
        pipe = Popen(ps_cmd, shell=True, stdout=PIPE).stdout
        output = pipe.read()
        pipe.close()  # fix: pipe handle was leaked
        # sample output:
        #root 1 /sbin/init
        #root 2 [kthreadd]
        for line in output.split("\n"):
            collected += line
            if line == '':
                continue
            word = line.split()
            self.cur_data['data']['parsed'][word[2]] = word[1]
        checkval = hashlib.md5(collected).hexdigest()
        self.cur_data['md5'] = checkval
        self.cur_data['data']['raw'] = collected
        return True
    #- if issues, handle them here
    except Exception:
        # fix: the old bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; keep the deliberate best-effort contract.
        self.cur_data['md5'] = ''
        self.cur_data['data'] = ''
        return False
def flushev():
    """Read an Origin object from stdin (seiscomp binary archive) and
    look up its associated event id in the seiscomp database.

    Exits with status 1 when stdin cannot be opened or no Origin can be
    decoded; on database errors the event id falls back to ''."""
    import seiscomp3.DataModel, seiscomp3.IO
    import os, sys
    import bltools
    cfg=bltools.get_config()
    # data pathes
    # Read Origin from stdin
    ar = seiscomp3.IO.BinaryArchive()
    if not ar.open("-"):
        # Hmmm, opening stdin failed
        print 'err input '# + fout
        sys.exit(1)
    # Read back into an 'origin' object
    obj = ar.readObject()
    ar.close()
    org = seiscomp3.DataModel.Origin.Cast(obj)
    # No origin -> error
    if not org:
        sys.exit(1)
    # Export origin from database
    idorg = org.publicID()
    # extract associated event name from db
    try:
        from subprocess import Popen, PIPE
        sql_req = 'seiscomp exec scquery -d '+ cfg['database']['host'] + ' ' + cfg['database']['getevent_query'] + ' ' + idorg
        stdout = Popen(sql_req, shell=True, stdout=PIPE).stdout
        idev = stdout.read().strip() # strip removes trailing line feeds
    except Exception, exception:
        # best-effort lookup: no event id when the query fails
        idev=''
def generate_changelog ():
    """Regenerate ChangeLog from `git log`, streaming through a temp file
    in the working directory so a partial run never clobbers the old
    ChangeLog (os.rename is atomic on the same filesystem)."""
    from subprocess import Popen, PIPE
    from tempfile import mkstemp
    fd, filename = mkstemp (dir=os.getcwd ())
    print "Creating ChangeLog"
    cmd = ["git", "log", "-M", "-C", "--name-status", "--date=short", "--no-color"]
    pipe = Popen (cmd, stdout=PIPE).stdout
    # stream in 1K chunks so huge histories never sit fully in memory
    buff = pipe.read (1024)
    while buff:
        os.write (fd, buff)
        buff = pipe.read (1024)
    os.close (fd)
    os.rename (filename, "ChangeLog")
def render_doc_html(path):
    '''Render the HTML for the doc content.

    Runs pandoc on path + '.md' and returns the HTML as text; returns a
    "Path NOT found" message string when the markdown file is missing
    (preserving the original error contract).
    '''
    if not exists(path + '.md'):
        return ("Path NOT found " + path)
    script = ['pandoc', '-t', 'html', path + '.md']
    output = Popen(script, stdout=PIPE).stdout
    try:
        return output.read().decode(encoding='UTF-8')
    finally:
        output.close()  # fix: the pipe handle was previously leaked
def check_tor(self):
    #Tor is running?
    """Return 1 when a torrc-bearing process is found, else 0."""
    pipe = Popen('ps aux | grep torrc | grep -v grep', shell=True, stdout=PIPE).stdout
    listing = str(pipe.read()).replace("b''", '')
    return 1 if listing else 0
def check_socat(self):
    #Socat is running?
    """Return 1 when a socat process is found in `ps caux`, else 0."""
    pipe = Popen('(ps caux | grep socat)', shell=True, stdout=PIPE).stdout
    listing = str(pipe.read()).replace("b''", '')
    return 1 if listing else 0
def getHighestPid(self):
    """ Gets the highest pid of all application processes """
    # pgrep prints matching pids in ascending order: the last one wins
    pgrep_out = Popen('pgrep %s' % self.command, shell=True, stdout=PIPE).stdout
    pids = pgrep_out.read().split()
    if not pids:
        # no matching process
        return None
    return int(pids[-1])
def checkEnclosureTemp(self):
    """Poll the DHT helper scripts for temperature and humidity, store
    the readings, notify the frontend, and run the heater logic."""
    model_and_pin = str(self._settings.get(["dhtModel"])) + " " + str(self._settings.get(["dhtPin"]))
    pipe = Popen("sudo " + self._settings.get(["getTempScript"]) + " " + model_and_pin,
                 shell=True, stdout=PIPE).stdout
    reading = pipe.read()
    if reading.find("Failed") != -1:
        self._logger.info("Failed to read Temperature")
    else:
        self.enclosureCurrentTemperature = float(reading)
    pipe = Popen("sudo " + self._settings.get(["getHumiScript"]) + " " + model_and_pin,
                 shell=True, stdout=PIPE).stdout
    reading = pipe.read()
    if reading.find("Failed") != -1:
        self._logger.info("Failed to read Humidity")
    else:
        self.enclosureCurrentHumidity = float(reading)
    self._plugin_manager.send_plugin_message(self._identifier,
        dict(enclosuretemp=self.enclosureCurrentTemperature,
             enclosureHumidity=self.enclosureCurrentHumidity))
    self.heaterHandler()
def try_report(cr, uid, rname, ids, data=None, context=None, our_module=None):
    """ Try to render a report <rname> with contents of ids

    This function should also check for common pitfalls of reports.

    Renders via the netsvc report service, optionally saves a copy under
    test_report_directory, and for PDF output runs pdftotext to look for
    unevaluated "[[ ... ]]" expressions. Returns True when the report is
    produced and passes the checks, False for unexaminable formats."""
    if data is None:
        data = {}
    if context is None:
        context = {}
    # accept both "report.foo" and "foo" spellings
    if rname.startswith('report.'):
        rname_s = rname[7:]
    else:
        rname_s = rname
    # NOTE(review): rname_s is computed but unused in this function.
    _logger.log(netsvc.logging.TEST, " - Trying %s.create(%r)", rname, ids)
    res = netsvc.LocalService(rname).create(cr, uid, ids, data, context)
    if not isinstance(res, tuple):
        raise RuntimeError("Result of %s.create() should be a (data,format) tuple, now it is a %s" % \
            (rname, type(res)))
    (res_data, res_format) = res
    if not res_data:
        raise ValueError("Report %s produced an empty result!" % rname)
    # optionally keep the rendered report on disk for inspection
    if tools.config['test_report_directory']:
        file(os.path.join(tools.config['test_report_directory'], rname+ '.'+res_format), 'wb+').write(res_data)
    _logger.debug("Have a %s report for %s, will examine it", res_format, rname)
    if res_format == 'pdf':
        if res_data[:5] != '%PDF-':
            raise ValueError("Report %s produced a non-pdf header, %r" % (rname, res_data[:10]))
        res_text = False
        try:
            # round-trip through pdftotext so the text can be linted below
            fd, rfname = tempfile.mkstemp(suffix=res_format)
            os.write(fd, res_data)
            os.close(fd)
            fp = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'], shell=False, stdout=PIPE).stdout
            res_text = tools.ustr(fp.read())
            os.unlink(rfname)
        except Exception:
            _logger.debug("Unable to parse PDF report: install pdftotext to perform automated tests.")
        if res_text is not False:
            for line in res_text.split('\n'):
                # leftover "[[ ... ]]" markers mean an unevaluated expression
                if ('[[' in line) or ('[ [' in line):
                    _logger.error("Report %s may have bad expression near: \"%s\".", rname, line[80:])
            # TODO more checks, what else can be a sign of a faulty report?
    elif res_format == 'foobar':
        # TODO
        pass
    else:
        _logger.warning("Report %s produced a \"%s\" chunk, cannot examine it", rname, res_format)
        return False
    _logger.log(netsvc.logging.TEST, " + Report %s produced correctly.", rname)
    return True
def _TestConf(self):
    """Run `nginx -t` and report whether the configuration test passed."""
    # nginx prints its verdict on stderr
    stderr_pipe = Popen((options.nginx_bin_path+'nginx', '-t'), stderr=PIPE).stderr
    verdict = stderr_pipe.read()
    found = re.search(re.compile('test is successful'), verdict)
    return True if found else False
def pkg_config(what):
    """Return the `pkg-config --cflags --libs <what>` tokens as a list.

    Preserves the old os.popen-style error contract: a pkg-config
    failure yields an empty list, and an exit status > 1 (pkg-config
    itself broken or missing) raises EnvironmentError.
    """
    bufsize, pkg_config_cmd = 1024, "%s --cflags --libs %s" % (pkg_config.path, what)
    proc = Popen(pkg_config_cmd, shell=True, bufsize=bufsize, stdout=PIPE)
    data = proc.stdout.read().strip().split()
    ret = proc.wait()
    # fix: the old code tested the return value of Popen().stdout.close(),
    # which is always None — the failure path could never trigger. wait()
    # gives the real exit code (no /256 decoding needed, unlike os.popen).
    if ret:
        data = []
        if ret > 1:
            raise EnvironmentError("%s not found" % (pkg_config.path,))
    return data
def run_command(self, inst, cmd, obj={}):
    """Run *cmd* on the instance's first host over ssh and return up to
    the first 4098 bytes of its output.

    NOTE(review): *obj* is accepted but unused — kept for interface
    compatibility."""
    env = self._env(inst)
    ssh_cmd = 'ssh {user}@{host} {opts} {cmd}'.format(
        user=env.user,
        host=env.hosts[0],
        opts=self._ssh_opts_str(env),
        cmd=cmd,
    )
    out = Popen([ssh_cmd], shell=True, stdout=PIPE, close_fds=True).stdout
    return out.read(4098)
def mousecon():
    """Runs function for mouse control.

    Captures webcam frames, segments the hand by skin color, tracks the
    fingertip via contours/convex hull, and drives the X11 pointer
    (warp + click) until Esc (keycode 27) is pressed."""
    def repeat1(begin, unmute, last, hold, beginhold):
        """actual function for moving and clicking mouse"""
        def click_down():
            """Simulates a down click"""
            fake_input(d, ButtonPress, 1)
            d.sync()
        def click_up():
            """Simulates an up click"""
            fake_input(d, ButtonRelease, 1)
            d.sync()
        #captures input frame
        frame = cv.QueryFrame(capture)
        #initializes mouse behavior
        d = Display()
        s = d.screen()
        root = s.root
        #creates horizontally flipped copy of input frame to work with
        cv.Copy(frame, sframe)
        cv.Flip(sframe, sframe, 1)
        #makes mask of skintones
        dog = skin(sframe, ccolor)
        #inverts skintone mask to all non-skin areas
        cv.ConvertScale(dog, dog, -1, 255)
        #makes greyscale copy of frame
        cv.CvtColor(sframe, grey, cv.CV_BGR2GRAY)
        #replaces nonskin areas with white
        cv.Add(grey, white, grey, dog)
        #implements laplacian edge detection on greyscale image
        dst_16s2 = cv.CreateImage(cv.GetSize(bg), cv.IPL_DEPTH_16S, 1)
        cv.Laplace(grey, dst_16s2, 5)
        cv.Convert(dst_16s2, grey)
        #creates a threshold to binarize the image
        cv.Threshold(grey, grey, 75, 255, cv.CV_THRESH_BINARY)
        #creates contours on greyscale image
        storage = cv.CreateMemStorage(0)
        contours = cv.FindContours(grey, storage, cv.CV_RETR_TREE, cv.CV_CHAIN_APPROX_SIMPLE)
        #sets final display frame background to black
        cv.Set(cframe, 0)
        #sets minimum range for object detection
        mx = 20000
        #initializes hand position to previous
        best = last
        #creates some cvSeq maxcont by copying contours
        maxcont = contours
        #goes through all contours and finds bounding box
        while contours:
            bound_rect = cv.BoundingRect(list(contours))
            #if bounding box area is greater than min range or current max box
            if bound_rect[3] * bound_rect[2] > mx:
                #sets max to current object, creates position at center of box, and sets display contour to current
                mx = bound_rect[3] * bound_rect[2]
                maxcont = contours
            #goes to next contour
            contours = contours.h_next()
        #draws largest contour on final frame
        cv.DrawContours(cframe, maxcont, 255, 127, 0)
        if maxcont:
            #draws and finds convex hull and convexity defects
            chull = cv.ConvexHull2(maxcont, storage, cv.CV_CLOCKWISE, 1)
            cv.PolyLine(cframe, [chull], 1, 255)
            chulllist = list(chull)
            chull = cv.ConvexHull2(maxcont, storage, cv.CV_CLOCKWISE, 0)
            cdefects = cv.ConvexityDefects(maxcont, chull, storage)
            #filters smaller convexity defects and displays larger ones
            truedefects = []
            for j in cdefects:
                if j[3] > 30:
                    truedefects.append(j)
                    cv.Circle(cframe, j[2], 6, 255)
            #Finds highest point of convex hull if hand follows smooth vertical shape
            if cdefects and len(truedefects) < 4:
                tipheight = 481
                tiploc = 0
                for j in chulllist:
                    if j[1] < tipheight:
                        tipheight = j[1]
                        tiploc = chulllist.index(j)
                best = chulllist[tiploc]
            #if hand is open, begin click
            if len(truedefects) >= 4:
                if beginhold == 0:
                    beginhold = time.time()
                else:
                    #if .05 seconds have passed, clicks down
                    if (time.time() - beginhold > .05) and not hold:
                        hold = True
                        beginhold = 0
                        click_down()
            #unclicks if hand returns to smooth
            else:
                if hold:
                    click_up()
                    hold = False
                beginhold = 0
        #keeps last position if movement too quick, or smooths slower movement
        xdiff = best[0] - last[0]
        ydiff = best[1] - last[1]
        dist = math.sqrt(xdiff**2 + ydiff**2)
        if dist > 100:
            best = last
        else:
            best = (last[0] + xdiff * .75, last[1] + ydiff * .75)
        #displays main position circle
        cv.Circle(cframe, (int(best[0]), int(best[1])), 20, 255)
        #displays image with contours
        cv.ShowImage("w2", cframe)
        cv.MoveWindow('w2', 500, 0)
        #delay between frame capture
        c = cv.WaitKey(10)
        #Mouse Move/ Bottom Pointer
        Dx, Dy = mousedelta(last, best)
        # map camera coords to screen coords and move the X11 pointer
        root.warp_pointer((best[0] - 320) * 1600 / 600 + 800, best[1] * 900 / 360)
        d.sync()
        return (begin, unmute, best, hold, beginhold)
    def mousedelta(pos1, pos2):
        """finds difference between current mouse position and last"""
        x0 = pos1[0]
        y0 = pos1[1]
        x = pos2[0]
        y = pos2[1]
        dx, dy = x - x0, y - y0
        Dx = dx
        Dy = dy
        return Dx, Dy
    #button.configure(text = "Volume Control", command=volcon)
    #Systemname True= Linux False= Mac
    stdout = Popen('uname -a', shell=True, stdout=PIPE).stdout
    systemname = stdout.read()
    sysname = True
    if 'Mac' in systemname:
        sysname = False
    else:
        sysname = True
    #goes through all potential cameras to choose working camera
    for i in range(3):
        capture = cv.CaptureFromCAM(i)
        if capture:
            break
    #takes initihttp://code.activestate.com/recipes/578104-openkinect-mouse-control-using-python/al picture of background
    bg = cv.QueryFrame(capture)
    #creates static white image
    white = cv.CreateImage(cv.GetSize(bg), 8, 1)
    cv.Set(white, 255)
    #creates temporary variables
    sframe = cv.CreateImage(cv.GetSize(bg), 8, 3)
    thehandcolor = cv.CreateImage(cv.GetSize(bg), 8, 3)
    flipped = cv.CreateImage(cv.GetSize(bg), 8, 3)
    dog = cv.CreateImage(cv.GetSize(bg), 8, 1)
    grey = cv.CreateImage(cv.GetSize(bg), 8, 1)
    cframe = cv.CreateImage(cv.GetSize(bg), 8, 1)
    #initializes variables for motion start time, mute status, and previous hand position
    begin = 0.0
    unmute = True
    last = (320, 240)
    hold = False
    beginhold = 0.0
    #initializes skin color, then runs through mouse control
    ccolor = bgr_ycrcb(setup(flipped, capture, thehandcolor))
    cv.DestroyWindow("w3")
    # main loop: one tracking step per frame until Esc (27) is pressed
    while cv.WaitKey(10) != 27:
        begin, unmute, last, hold, beginhold = repeat1(begin, unmute, last, hold, beginhold)
# Game of Life
from copy import deepcopy
from time import sleep, time
from subprocess import Popen, PIPE
from termcolor import colored, cprint
from random import randint, choice
import os
import signal
import sys  # fix: signal_handler calls sys.exit() but sys was never imported

# get size of terminal
#width
stdout = Popen('tput cols', shell=True, stdout=PIPE).stdout
WIDTH = int( stdout.read() )
#height
stdout = Popen('tput lines', shell=True, stdout=PIPE).stdout
HEIGHT = int( stdout.read() ) - 1

# ------- SPECIAL HANDLING FUNCTIONS ------- #
def signal_handler(signum, frame):
    """Restore the cursor and terminal attributes, then exit (Ctrl-C)."""
    # (Show the cursor again)
    os.system('echo "\x1b[?25h"')
    os.system('tput sgr0')
    sys.exit()

signal.signal(signal.SIGINT, signal_handler)

# Hide the cursor
os.system('echo -ne "\x1b[?25l"')
import xml.dom.minidom
from subprocess import Popen, PIPE

# Collect the .txt exports from the current directory and pretty-print
# each one as an .xml file.
stdout = Popen('ls | grep .txt', shell=True, stdout=PIPE).stdout
xmlFiles = stdout.read().split()
stdout.close()  # fix: the pipe handle was previously leaked
print(xmlFiles)
for a in xmlFiles:
    print(f"Converting {a}")
    doc = xml.dom.minidom.parse(a.decode())
    pretty_xml_as_string = doc.toprettyxml()
    # fix: context manager guarantees the file is closed even if the
    # write raises
    with open(a.decode().replace(".txt", ".xml"), "w") as fin:
        fin.write(pretty_xml_as_string)
def collect(self, device, log):
    """Build the collection command for *device*/*log* via prepare() and
    return the raw stdout of running it through the shell."""
    command = self.prepare(device, log)
    pipe = Popen(command, shell=True, stdout=PIPE).stdout
    return pipe.read()
    return

def rendering_text(input_file, output_file):
    """Burn the 2.srt subtitles into *input_file* with mencoder, writing
    the result to converted_videos/<output_file>."""
    stdout = Popen(
        'mencoder %s -subpos 85 -sub 2.srt -o converted_videos/%s -oac copy -ovc lavc -lavcopts vbitrate=1200 -subfont-text-scale 3'
        % (input_file, output_file), shell=True, stdout=PIPE).stdout
    print "Converting %s to %s " % (input_file, output_file)
    # draining the pipe blocks until mencoder finishes
    stdout.read()
    return

# driver: subtitle every .avi in the current directory
stdout = Popen('ls -1 *.avi ', shell=True, stdout=PIPE).stdout
video_files = []
#get the list of files from pipe output
video_files = stdout.read()
#store output in an array
video_files = video_files.split()
#print "Converting %d file(s)." %len(video_files)
for i, input_file in enumerate(video_files):
    output_file = '%s_%s' % ('subs', input_file)
    print "Converting %d of %d file(s)..." % (i + 1, len(video_files))
    srt_create(input_file)
    rendering_text(input_file, output_file)
def try_report(cr, uid, rname, ids, data=None, context=None, our_module=None):
    """ Try to render a report <rname> with contents of ids

    This function should also check for common pitfalls of reports.

    Renders via the netsvc report service, optionally saves a copy under
    test_report_directory, and for PDF output runs pdftotext to look for
    unevaluated "[[ ... ]]" expressions. Returns True when the report is
    produced and passes the checks, False for unexaminable formats."""
    if our_module:
        log = logging.getLogger('tests.%s' % our_module)
    else:
        log = logging.getLogger('tools.test_reports')
    if data is None:
        data = {}
    if context is None:
        context = {}
    # accept both "report.foo" and "foo" spellings
    if rname.startswith('report.'):
        rname_s = rname[7:]
    else:
        rname_s = rname
    # NOTE(review): rname_s is computed but unused in this function.
    log.log(netsvc.logging.TEST, " - Trying %s.create(%r)", rname, ids)
    res = netsvc.LocalService(rname).create(cr, uid, ids, data, context)
    if not isinstance(res, tuple):
        raise RuntimeError("Result of %s.create() should be a (data,format) tuple, now it is a %s" % \
            (rname, type(res)))
    (res_data, res_format) = res
    if not res_data:
        raise ValueError("Report %s produced an empty result!" % rname)
    # optionally keep the rendered report on disk for inspection
    if tools.config['test_report_directory']:
        file(os.path.join(tools.config['test_report_directory'], rname+ '.'+res_format), 'wb+').write(res_data)
    log.debug("Have a %s report for %s, will examine it", res_format, rname)
    if res_format == 'pdf':
        if res_data[:5] != '%PDF-':
            raise ValueError("Report %s produced a non-pdf header, %r" % (rname, res_data[:10]))
        res_text = False
        try:
            # round-trip through pdftotext so the text can be linted below
            fd, rfname = tempfile.mkstemp(suffix=res_format)
            os.write(fd, res_data)
            os.close(fd)
            fp = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'], shell=False, stdout=PIPE).stdout
            res_text = tools.ustr(fp.read())
            os.unlink(rfname)
        except Exception:
            log.debug("Unable to parse PDF report: install pdftotext to perform automated tests.")
        if res_text is not False:
            for line in res_text.split('\n'):
                # leftover "[[ ... ]]" markers mean an unevaluated expression
                if ('[[' in line) or ('[ [' in line):
                    log.error("Report %s may have bad expression near: \"%s\".", rname, line[80:])
            # TODO more checks, what else can be a sign of a faulty report?
    elif res_format == 'foobar':
        # TODO
        pass
    else:
        log.warning("Report %s produced a \"%s\" chunk, cannot examine it", rname, res_format)
        return False
    log.log(netsvc.logging.TEST, " + Report %s produced correctly.", rname)
    return True
def get_last(name):
    """Return (formatted timestamp, formatted value) of the most recent
    sample in data/garden/<name>.rrd via `rrdtool lastupdate`."""
    rrd_path = os.path.join("data", "garden", name) + ".rrd"
    pipe = Popen("rrdtool lastupdate %s" % rrd_path, shell=True, stdout=PIPE).stdout
    # the third line of lastupdate output is "<timestamp>: <value>"
    stamp, val = pipe.read().split("\n")[2].split(":")
    return stamp_to_str(float(stamp)), "%0.00f" % float(val)
def calculateForNativeAndASN1SCC(absASN1SCCpath, autosrc, names, inputFiles):
    """Compute the maximum encoded buffer size of every ASN.1 type.

    Invokes the ASN1SCC compiler on *inputFiles* (generating C sources in
    *autosrc*), emits a helper C file that declares one variable per type
    sized with the generated ``*_REQUIRED_BYTES_FOR_[ACN_]ENCODING``
    macros, compiles that file with every available platform compiler,
    and reads the resulting symbol sizes back with ``nm --print-size``.

    :param absASN1SCCpath: absolute path to the ASN1SCC compiler binary
    :param autosrc: directory where generated sources/objects are placed
    :param names: mapping of ASN.1 type name -> AST node
    :param inputFiles: list of .asn1/.acn input files
    :return: dict mapping ASN.1 type name -> maximum size (as reported by
        nm) across all compilers
    """
    base = "ShowNativeBuffers"
    # Enable ACN handling only if at least one .acn file was supplied.
    acn = " -ACN " if any(x.lower().endswith(".acn") for x in inputFiles) else ""
    inputASN1files = [x for x in inputFiles if not x.lower().endswith('.acn')]
    # Spawn ASN1SCC.exe compiler - for MacOS define a new sh file calling mono Asn1f2.exe
    if platform.system() == "Windows" or platform.system() == "Darwin":
        mysystem(
            "%s -c -uPER -o \"%s\" %s %s" %
            (absASN1SCCpath, autosrc, acn, '"' + '" "'.join(inputFiles) + '"'))
    else:
        # On Linux the compiler is a .NET assembly run through mono.
        cmd = "mono %s -c -uPER -fp AUTO -typePrefix asn1Scc -o \"%s\" %s %s" % (
            absASN1SCCpath, autosrc, acn, '"' + '" "'.join(inputFiles) + '"')
        res = mysystem(cmd)
        if res != 0:
            panic("This command failed: %s\n" % cmd)
    # Helper C file: includes each generated header and declares one
    # variable per type so the compiler/linker records their sizes.
    msgEncoderFile = open(autosrc + os.sep + base + ".stats.c", 'w')
    # msgEncoderFile.write('#include "DumpableTypes.h"\n')
    for a in inputASN1files:
        msgEncoderFile.write('#include "%s.h"\n' % os.path.splitext(os.path.basename(a))[0])
    # Snapshot the parser's global state per input file.
    # NOTE(review): uniqueASNfiles is built but only consumed by the
    # commented-out CreateDumpableCtypes call below -- presumably kept for
    # that disabled feature; verify before removing.
    uniqueASNfiles = {}
    for asnFile in inputASN1files:
        tmpNames = {}
        for name in asnParser.g_typesOfFile[asnFile]:
            tmpNames[name] = asnParser.g_names[name]
        uniqueASNfiles[asnFile] = [
            copy.copy(
                tmpNames),  # map Typename to type definition class from asnAST
            copy.copy(asnParser.g_astOfFile[asnFile]
                      ),  # list of nameless type definitions
            copy.copy(asnParser.g_leafTypeDict)
        ]  # map from Typename to leafType
    configMT.outputDir = autosrc + os.sep
    # dumpable.CreateDumpableCtypes(uniqueASNfiles)
    for asnTypename in sorted(list(names.keys())):
        node = names[asnTypename]
        # skip compiler-generated (artificial) intermediate types
        if node._isArtificial:
            continue
        cleaned = cleanNameAsAsn1cWants(asnTypename)
        msgEncoderFile.write('static asn1Scc%s sizeof_%s;\n' % (cleaned, cleaned))
        msgEncoderFile.write(
            'char bytesEncoding_%s[asn1Scc%s_REQUIRED_BYTES_FOR_ENCODING];\n' % (cleaned, cleaned))
        if acn != "" and node.hasAcnEncDec:
            msgEncoderFile.write(
                'char bytesAcnEncoding_%s[asn1Scc%s_REQUIRED_BYTES_FOR_ACN_ENCODING];\n' % (cleaned, cleaned))
    msgEncoderFile.close()
    # Code generation - asn1c part
    # Create a dictionary to lookup the asn-types from their corresponding c-type
    namesDict = {}
    for asnTypename in sorted(list(names.keys())):
        node = names[asnTypename]
        if node._isArtificial:
            continue
        namesDict[cleanNameAsAsn1cWants(asnTypename)] = asnTypename
    # Get a list of all available compilers
    platformCompilers = ['gcc']
    try:
        pipe = Popen("find-supported-compilers", stdout=PIPE).stdout
        # NOTE: entries read from the pipe are bytes -- hence the
        # 'gcc'.encode() fallback and cc.decode() below.
        platformCompilers = pipe.read().splitlines()
    except OSError as err:
        print(
            'Not running in a TASTE Environment: {}\nUsing GCC only for computing sizeofs'
            .format(str(err)))
        platformCompilers = ['gcc'.encode()]
    # Get the maximum size of each asn1type from all platform compilers
    messageSizes = {}
    for cc in platformCompilers:
        # Compile the generated C-file with each compiler
        pwd = os.getcwd()
        os.chdir(autosrc)
        path_to_compiler = spawn.find_executable(cc.decode('utf-8'))
        if path_to_compiler is None:
            # compiler not installed on this machine -- skip it
            os.chdir(pwd)
            continue
        for cfile in os.listdir("."):
            if cfile.endswith(".c"):
                if mysystem('%s -c -std=c99 -I. "%s" 2>"%s.stats.err"' %
                            (path_to_compiler, cfile, base)) != 0:
                    panic("Compilation with %s failed...\n"
                          "(report inside '%s')\n" % (cc, os.path.join(autosrc, base + ".stats.err")))
        os.chdir(pwd)
        # Receive the size information for each value from the compiled object file
        if platform.system() == "Darwin":
            nm = "gnm"  # GNU nm from homebrew/macports; BSD nm lacks --print-size
        else:
            nm = "nm"
        for line in os.popen(nm + " --print-size " + autosrc + os.sep + base + ".stats.o").readlines():
            try:
                (dummy, size, dummy2, msg) = line.split()
            except ValueError:
                # Ignore lines that are not well-formatted
                continue
            # Remove prefix
            asnType = msg.split('_', 1)[1]
            # get asn-type from cleaned type
            asnType = namesDict[asnType]
            assert asnType in list(names.keys())
            # Find maximum
            messageSizes.setdefault(asnType, 0)
            messageSizes[asnType] = max(int(size, 16), messageSizes[asnType])
    return messageSizes
# Enumerate loaded third-party kernel extensions and collect their metadata
# plus code-signing identity.  `kernel_dict`, `info` and `count` come from
# the enclosing scope (not visible in this chunk).
for kernelname in kernel_dict:
    # Skip the kernel itself and Apple-shipped kexts.
    if kernelname != '__kernel__' and not kernelname.startswith('com.apple.'):
        bundle_path = kernel_dict[kernelname]['OSBundlePath']
        bundle_version = kernel_dict[kernelname]['CFBundleVersion']
        bundle_executable = kernel_dict[kernelname]['OSBundleExecutablePath']
        bundle_codesign = ''
        developer_name = ''
        team_id = ''
        from subprocess import Popen, PIPE
        # codesign prints its verbose details on stderr, hence stderr=PIPE.
        # NOTE(review): the variable is misleadingly named `stdout` but holds
        # the child's *stderr* stream; also the pipe is never closed and the
        # child never reaped -- cannot be fixed in this doc-only pass.
        stdout = Popen("/usr/bin/codesign -dv --verbose=4 '" + bundle_path + "'",
                       shell=True, stderr=PIPE).stderr
        output = stdout.read()
        for line in output.splitlines():
            # Example line (presumed from the parsing below -- verify):
            #   Authority=Developer ID Application: Some Corp (TEAMID1234)
            if "Authority=Developer ID Application: " in line:
                bundle_codesign = line.replace(
                    "Authority=Developer ID Application: ", "")
                # everything but the last token is the developer name
                developer_name = " ".join(bundle_codesign.split()[:-1])
                # last token is the team id wrapped in parentheses
                team_id = bundle_codesign.split()[-1].strip("()")
        info[str(count)] = {
            'bundle_id': kernelname,
            'path': bundle_path,
            'version': bundle_version,
            'executable': bundle_executable,
            'developer': developer_name,
            'teamid': team_id
            # NOTE(review): the dict literal is truncated at this point in
            # this chunk; its closing brace (and any `count` increment)
            # lies outside this view.
def bcCatFile(self, filename, inode, image, dfxmlfile, redirect_file, outfile):
    """Extract one file's contents from a disk image using TSK's icat.

    Looks up (filename, inode) in self.fiDictList (populated once from the
    DFXML file via bcProcessDfxmlFileUsingSax), determines the image type
    from the extension, obtains the start offset of the second partition
    with mmls (skipped for fat12/iso9660/iso), then runs icat to dump the
    file:
      - into `outfile` when `redirect_file` is True,
      - printed to stdout for .txt/.xml files,
      - otherwise into /tmp (PDFs are additionally opened with evince).

    NOTE(review): assumes fiDictList entries are dicts with 'filename' and
    'inode' keys -- consistent with the lookups below, but confirm against
    bcProcessDfxmlFileUsingSax.
    """
    # Traverse the XML file, get the file_name, extract the inode number
    # of the file and run icat to extract the data.
    ## print(">>D: bcCatFile: Filename: ", filename)
    ## print(">>D: bcCatFile: image: ", image)
    ## print(">>D: bcCatFile: dfxmlfile: ", dfxmlfile)
    ## print(">>D: bcCatFile: outfile: ", outfile)
    # NOTE(review): `x` is assigned but never used afterwards (the
    # stdout-redirect code that used it is commented out below).
    x = Ui_MainWindow
    #x.oldstdout = sys.stdout
    #sys.stdout = StringIO()

    # Traverse through dfxmlfile to get the block containing
    # "filename" to extract the inode. Do this just once.
    if len(self.fiDictList) == 0:
        self.bcProcessDfxmlFileUsingSax(dfxmlfile)
    ## print("D: Length of fiDictList ", len(self.fiDictList))

    # Dictionary is formed. Now traverse through the array and
    # in each dictionary, get the inode and call iCat command.
    # NOTE(review): range(0, len-1) skips the final list entry -- looks
    # like an off-by-one; cannot be changed in this doc-only pass.
    for i in range(0, len(self.fiDictList)-1):
        if (self.fiDictList[i]['filename'] == filename and self.fiDictList[i]['inode'] == inode):
            ## print("D: Extracting the contents of the file:inode ", \
            ## filename, self.fiDictList[i]['inode'])

            # Get the offset of the 2nd partition using mmls cmd
            # ex: mmls -i aff ~/aaa/jo-favorites-usb-2009-12-11.aff
            if image.endswith(".E01") or image.endswith(".e01"):
                imgtype = 'ewf'
            elif image.endswith(".aff") or image.endswith(".AFF"):
                imgtype = 'aff'
            elif image.endswith(".iso") or image.endswith(".ISO"):
                imgtype = 'iso'
            else:
                imgtype = 'raw'

            # Extract the file-system type from dfxml file volume
            # NOTE(review): the local `ftype` is never read; the check
            # below uses `self.ftype`, presumably set as a side effect of
            # this call -- verify in bc_get_ftype_from_sax.
            ftype = self.bc_get_ftype_from_sax(dfxmlfile)

            # For FAT12 file-system there is no partiton information.
            # So skip the step for extracting partition offset.
            part2_start = 0
            if self.ftype != 'fat12' and self.ftype != 'iso9660' and imgtype != 'iso':
                mmls_cmd = "mmls -i " + imgtype +" "+image +" | grep \"02:\""
                ## print("D: Executing mmls command: ", mmls_cmd)
                part2 = subprocess.check_output(mmls_cmd, shell=True)
                ## print("D: Extracting partition-2: ", part2)
                part2_list = part2.split()
                # third whitespace-separated field of the "02:" line is
                # the partition's start sector
                part2_start = int(part2_list[2])
                ## print("D: Start offset of Partition-2: ", part2_start)

            ## icat_cmd ex: icat -o 1 ~/aaa/charlie-work-usb-2009-12-11.aff 130
            # redirect_file is set to True if the contents need to be
            # written to a file.
            if (redirect_file == True):
                outfile = self.bcHandleSpecialChars(outfile)
                icat_cmd = "icat -o "+str(part2_start)+ " "+ \
                           image + " " + \
                           self.fiDictList[i]['inode'] + ' > ' + outfile
                f2 = Popen(icat_cmd, shell = True, stdout=PIPE, stderr=PIPE)
                (data, err) = f2.communicate()
                # FIXME: Using subprocess.check_output is making icat_cmd
                # fail for some instances. Revisit this. Till then the
                # older call os.popen is used, which seems to work fine.
                # subprocess.check_output(icat_cmd, shell=True)
                ## print(">> Writing to file ", outfile)
            else:
                # Only printable files are dumped on the textEdit wondow.
                # The rest are redirected to a file in /tmp
                if (filename.endswith('txt') or filename.endswith('xml')):
                    icat_cmd = "icat -o "+str(part2_start)+ " "+ image + " " + self.fiDictList[i]['inode']
                    ## print(">> D: Executing iCAT command: ", icat_cmd)
                    f2 = os.popen(icat_cmd)
                    icat_out = f2.read()
                    print(">> Dumping Contents of the file :", filename)
                    print("\n")
                    print(icat_out)
                else:
                    # Strip the path to extract just the name of the file.
                    justFilename = self.bcGetFilenameFromPath(filename)
                    icat_cmd = "icat -o "+str(part2_start)+ " "+ \
                               image + " " + \
                               self.fiDictList[i]['inode'] + ' > /tmp/'+justFilename
                    f2 = os.popen(icat_cmd)

                    # Open the file in the pdf reader if it is a PDF file
                    # else copy it to a file in /tmp
                    if justFilename.endswith('pdf'):
                        print(">>> Opening the PDF file /tmp/",justFilename)
                        os.system('evince /tmp/'+justFilename)
                    else:
                        print(">>> File copied to: ", '/tmp/'+justFilename)
    return
class SystemSSHTransport(Transport):
    # Transport implementation that drives the local `ssh` binary, either
    # through subprocess pipes (public key auth) or a pty (password auth).
    def __init__(
        self,
        host: str = "",
        port: int = 22,
        auth_username: str = "",
        auth_private_key: str = "",
        auth_password: str = "",
        auth_strict_key: bool = True,
        auth_bypass: bool = False,
        timeout_socket: int = 5,
        timeout_transport: int = 5,
        timeout_ops: int = 10,
        timeout_exit: bool = True,
        keepalive: bool = False,
        keepalive_interval: int = 30,
        keepalive_type: str = "",
        keepalive_pattern: str = "\005",
        comms_prompt_pattern: str = r"^[a-z0-9.\-@()/:]{1,32}[#>$]$",
        comms_return_char: str = "\n",
        comms_ansi: bool = False,
        ssh_config_file: str = "",
        ssh_known_hosts_file: str = "",
        transport_options: Optional[Dict[str, Any]] = None,
    ) -> None:
        """
        SystemSSHTransport Object

        Inherit from Transport ABC
        SSH2Transport <- Transport (ABC)

        If using this driver, and passing a ssh_config_file (or setting this argument to
        `True`), all settings in the ssh config file will be superseded by any arguments
        passed here!

        SystemSSHTransport *always* prefers public key auth if given the option! If
        auth_private_key is set in the provided arguments OR if ssh_config_file is
        passed/True and there is a key for ANY match (i.e. `*` has a key in ssh config
        file!!), we will use that key! If public key auth fails and a username and
        password is set (manually or by ssh config file), password auth will be
        attempted.

        Note that comms_prompt_pattern, comms_return_char and comms_ansi are only passed
        here to handle "in channel" authentication required by SystemSSH -- these are
        assigned to private attributes in this class and ignored after authentication.
        If you wish to modify these values on a "live" scrapli connection, modify them
        in the Channel object, i.e. `conn.channel.comms_prompt_pattern`. Additionally
        timeout_ops is passed and assigned to _timeout_ops to use the same timeout_ops
        that is used in Channel to decorate the authentication methods here.

        Args:
            host: host ip/name to connect to
            port: port to connect to
            auth_username: username for authentication
            auth_private_key: path to private key for authentication
            auth_password: password for authentication
            auth_strict_key: True/False to enforce strict key checking (default is True)
            auth_bypass: bypass ssh key or password auth for devices without
                authentication, or that have auth prompts after ssh session establishment
            timeout_socket: timeout for ssh session to start -- this directly maps to
                the ConnectTimeout ssh argument; see `man ssh_config`
            timeout_transport: timeout for transport in seconds. since system ssh is
                using popen/pty we can't really set a timeout directly, so this value
                governs the timeout decorator for the transport read and write methods
            timeout_ops: timeout for channel operations in seconds -- this is also the
                timeout for finding and responding to username and password prompts at
                initial login. Assigned to a private attribute and ignored after
                authentication is completed.
            timeout_exit: True/False close transport if timeout encountered. If False
                and keepalives are in use, keepalives will prevent program from exiting
                so you should be sure to catch Timeout exceptions and handle them
                appropriately
            keepalive: whether or not to try to keep session alive
            keepalive_interval: interval to use for session keepalives
            keepalive_type: network|standard -- 'network' sends actual characters over
                the transport channel (useful for network-y devices that may not support
                'standard' keepalive mechanisms). 'standard' is not currently
                implemented for system ssh.
            keepalive_pattern: pattern to send to keep network channel alive. Default is
                u'\\005' (ctrl+e) which moves the cursor to end of line -- an innocuous
                pattern. Only entered *if* a lock can be acquired; only applicable when
                using keepalives of type 'network'.
            comms_prompt_pattern: prompt pattern expected for device, same as the one
                provided to channel -- system ssh needs this to decide if we are
                properly sending/receiving data (i.e. not stuck at a password prompt).
                Assigned to a private attribute and ignored after authentication.
            comms_return_char: return character to use on the channel, same as the one
                provided to channel -- used to probe the channel to confirm we are
                authenticated and sending/receiving data. Assigned to a private
                attribute and ignored after authentication.
            comms_ansi: True/False strip ansi characters from output; only needed by
                the transport on the off chance username/password prompts contain ansi.
                Assigned to a private attribute and ignored after authentication.
            ssh_config_file: string to path for ssh config file
            ssh_known_hosts_file: string to path for ssh known hosts file
            transport_options: SystemSSHTransport specific transport options, a dict of
                option name -> value. Currently:
                - open_cmd: string or list of strings appended to the ssh open command
                  scrapli builds (anything you could pass to ssh on your terminal;
                  passing bad arguments can break things!)

        Returns:
            N/A  # noqa: DAR202

        Raises:
            N/A
        """
        super().__init__(
            host,
            port,
            timeout_socket,
            timeout_transport,
            timeout_exit,
            keepalive,
            keepalive_interval,
            keepalive_type,
            keepalive_pattern,
        )
        self.auth_username: str = auth_username
        self.auth_private_key: str = auth_private_key
        self.auth_password: str = auth_password
        self.auth_strict_key: bool = auth_strict_key
        self.auth_bypass: bool = auth_bypass
        self._timeout_ops: int = timeout_ops
        self._comms_prompt_pattern: str = comms_prompt_pattern
        self._comms_return_char: str = comms_return_char
        self._comms_ansi: bool = comms_ansi
        self._process_ssh_config(ssh_config_file)
        self.ssh_known_hosts_file: str = ssh_known_hosts_file
        # session is a Popen when pipes are used, a PtyProcess otherwise
        self.session: Union[Popen[bytes], PtyProcess]  # pylint: disable=E1136
        self.lib_auth_exception = ScrapliAuthenticationFailed
        self._isauthenticated = False

        # ensure we set transport_options to a dict if its left as None
        self.transport_options = transport_options or {}

        self.open_cmd = ["ssh", self.host]
        self._build_open_cmd()

        # create stdin/stdout fd in case we can use pipes for session
        self._stdin_fd = -1
        self._stdout_fd = -1

    def _process_ssh_config(self, ssh_config_file: str) -> None:
        """
        Method to parse ssh config file

        Ensure ssh_config_file is valid (if providing a string path to config file), or
        resolve config file if passed True. Search config file for any private key, if
        ANY matching key is found and user has not provided a private key, set
        `auth_private_key` to the value of the found key. This is because we prefer to
        use `open_pipes` over `open_pty`!

        Args:
            ssh_config_file: string path to ssh config file; passed down from `Scrape`,
                or the `NetworkDriver` or subclasses of it, in most cases.

        Returns:
            N/A  # noqa: DAR202

        Raises:
            N/A
        """
        ssh = SSHConfig(ssh_config_file=ssh_config_file)
        self.ssh_config_file = ssh.ssh_config_file
        host_config = ssh.lookup(host=self.host)
        if not self.auth_private_key and host_config.identity_file:
            self.auth_private_key = os.path.expanduser(host_config.identity_file.strip())

    def _build_open_cmd(self) -> None:
        """
        Method to craft command to open ssh session

        Args:
            N/A

        Returns:
            N/A  # noqa: DAR202

        Raises:
            N/A
        """
        self.open_cmd.extend(["-p", str(self.port)])
        self.open_cmd.extend(["-o", f"ConnectTimeout={self.timeout_socket}"])
        self.open_cmd.extend(["-o", f"ServerAliveInterval={self.timeout_transport}"])
        if self.auth_private_key:
            self.open_cmd.extend(["-i", self.auth_private_key])
        if self.auth_username:
            self.open_cmd.extend(["-l", self.auth_username])
        if self.auth_strict_key is False:
            self.open_cmd.extend(["-o", "StrictHostKeyChecking=no"])
            self.open_cmd.extend(["-o", "UserKnownHostsFile=/dev/null"])
        else:
            self.open_cmd.extend(["-o", "StrictHostKeyChecking=yes"])
            if self.ssh_known_hosts_file:
                self.open_cmd.extend(["-o", f"UserKnownHostsFile={self.ssh_known_hosts_file}"])
        if self.ssh_config_file:
            self.open_cmd.extend(["-F", self.ssh_config_file])
        else:
            self.open_cmd.extend(["-F", "/dev/null"])
        # user-supplied extra arguments are appended last so they win
        user_args = self.transport_options.get("open_cmd", [])
        if isinstance(user_args, str):
            user_args = [user_args]
        self.open_cmd.extend(user_args)

    def open(self) -> None:
        """
        Parent method to open session, authenticate and acquire shell

        If possible it is preferable to use the `_open_pipes` method, but we can only
        do this IF we can authenticate with public key authorization (because we don't
        have to spawn a PTY; if no public key we have to spawn PTY to deal w/
        authentication prompts). IF we get a private key provided, use pipes method,
        otherwise we will just deal with `_open_pty`. `_open_pty` is less preferable
        because we have to spawn a PTY and cannot as easily tell if SSH authentication
        is successful. With `_open_pipes` we can read stderr which contains the output
        from the verbose flag for SSH -- this contains a message that indicates success
        of SSH auth. In the case of `_open_pty` we have to read from the channel
        directly like in the case of telnet... so it works, but its just a bit less
        desirable.

        Args:
            N/A

        Returns:
            N/A  # noqa: DAR202

        Raises:
            ScrapliAuthenticationFailed: if all authentication means fail
        """
        self.session_lock.acquire()

        LOG.info(f"Attempting to authenticate to {self.host}")

        # if auth_bypass kick off keepalive thread if necessary and return
        if self.auth_bypass:
            LOG.info("`auth_bypass` is True, bypassing authentication")
            self._open_pty(skip_auth=True)
            self._session_keepalive()
            return

        # if authenticating with private key prefer to use open pipes
        # _open_pipes uses subprocess Popen which is preferable to opening a pty
        # if _open_pipes fails and no password available, raise failure, otherwise try
        # password auth
        if self.auth_private_key:
            open_pipes_result = self._open_pipes()
            if open_pipes_result:
                return
            if not self.auth_password or not self.auth_username:
                msg = (
                    f"Failed to authenticate to host {self.host} with private key "
                    f"`{self.auth_private_key}`. Unable to continue authentication, "
                    "missing username, password, or both."
                )
                LOG.critical(msg)
                raise ScrapliAuthenticationFailed(msg)
            msg = (
                f"Failed to authenticate to host {self.host} with private key "
                f"`{self.auth_private_key}`. Attempting to continue with password authentication."
            )
            LOG.critical(msg)

        # If public key auth fails or is not configured, open a pty session
        if not self._open_pty():
            msg = f"Authentication to host {self.host} failed"
            LOG.critical(msg)
            raise ScrapliAuthenticationFailed(msg)

        LOG.info(f"Successfully authenticated to {self.host}")

        if self.keepalive:
            self._session_keepalive()

    def _open_pipes(self) -> bool:
        """
        Private method to open session with subprocess.Popen

        Args:
            N/A

        Returns:
            bool: True/False session was opened and authenticated

        Raises:
            N/A
        """
        # import here so that we dont blow up when running on windows (windows users
        # need to use ssh2 or paramiko transport)
        import pty  # pylint: disable=C0415

        # copy the open_cmd as we don't want to update the objects open_cmd until we
        # know we can authenticate. add verbose output and disable batch mode (disables
        # passphrase/password queries). If auth is successful update the object
        # open_cmd to represent what was used
        open_cmd = self.open_cmd.copy()
        open_cmd.append("-v")
        open_cmd.extend(["-o", "BatchMode=yes"])

        LOG.info(f"Attempting to open session with the following command: {open_cmd}")

        stdout_master_pty, stdout_slave_pty = pty.openpty()
        stdin_master_pty, stdin_slave_pty = pty.openpty()

        self.session = Popen(
            open_cmd,
            bufsize=0,
            shell=False,
            stdin=stdin_slave_pty,
            stdout=stdout_slave_pty,
            stderr=PIPE,
        )
        # close the slave fds, don't need them anymore
        os.close(stdin_slave_pty)
        os.close(stdout_slave_pty)
        LOG.debug(f"Session to host {self.host} spawned")

        try:
            self._pipes_isauthenticated(self.session)
        except TimeoutError:
            # If auth fails, kill the popen session, also need to manually close the
            # stderr pipe for some reason... unclear why, but w/out this it will hang
            # open
            if self.session.stderr is not None:
                stderr_fd = self.session.stderr.fileno()
                os.close(stderr_fd)
            self.session.kill()
            # close the ptys we forked
            os.close(stdin_master_pty)
            os.close(stdout_master_pty)
            # it seems that killing the process/fds somehow unlocks the thread? very
            # unsure how/why
            self.session_lock.acquire()
            return False

        LOG.debug(f"Authenticated to host {self.host} with public key")

        # set stdin/stdout to the new master pty fds
        self._stdin_fd = stdin_master_pty
        self._stdout_fd = stdout_master_pty

        self.open_cmd = open_cmd
        self.session_lock.release()
        return True

    @operation_timeout("_timeout_ops", "Timed out determining if session is authenticated")
    def _pipes_isauthenticated(self, pipes_session: "PopenBytes") -> bool:
        """
        Private method to check initial authentication when using subprocess.Popen

        Since we always run ssh with `-v` we can simply check the stderr (where verbose
        output goes) to see if `Authenticated to [our host]` is in the output.

        Args:
            pipes_session: Popen pipes session object

        Returns:
            bool: True/False session was authenticated

        Raises:
            ScrapliTimeout: if `Operation timed out` in stderr output
            ScrapliAuthenticationFailed: if private key permissions are too open
        """
        if pipes_session.stderr is None:
            raise ScrapliTimeout(f"Could not read stderr while connecting to host {self.host}")

        output = b""
        while True:
            output += pipes_session.stderr.read(65535)
            if f"Authenticated to {self.host}".encode() in output:
                self._isauthenticated = True
                return True
            if b"Operation timed out" in output:
                msg = f"Timed opening connection to host {self.host}"
                raise ScrapliTimeout(msg)
            if b"WARNING: UNPROTECTED PRIVATE KEY FILE!" in output:
                msg = (
                    f"Permissions for private key `{self.auth_private_key}` are too open, "
                    "authentication failed!"
                )
                raise ScrapliAuthenticationFailed(msg)

    def _open_pty(self, skip_auth: bool = False) -> bool:
        """
        Private method to open session with PtyProcess

        Args:
            skip_auth: skip auth in the case of auth_bypass mode

        Returns:
            bool: True/False session was opened and authenticated

        Raises:
            N/A
        """
        LOG.info(f"Attempting to open session with the following command: {self.open_cmd}")
        self.session = PtyProcess.spawn(self.open_cmd)
        LOG.debug(f"Session to host {self.host} spawned")
        self.session_lock.release()
        if skip_auth:
            return True
        self._pty_authenticate(pty_session=self.session)
        if not self._pty_isauthenticated(self.session):
            return False
        LOG.debug(f"Authenticated to host {self.host} with password")
        return True

    def _pty_authentication_eof_handler(self, output: bytes) -> str:
        """
        Parse EOF messages from _pty_authenticate and create log/stack exception message

        Args:
            output: bytes output from _pty_authenticate

        Returns:
            str: message for logging/stack trace

        Raises:
            N/A
        """
        # default message when no more specific cause is recognized below
        msg = (
            f"Failed to open connection to host {self.host}. Do you need to disable "
            "`auth_strict_key`?"
        )
        if b"Host key verification failed" in output:
            msg = f"Host key verification failed for host {self.host}"
        elif b"Operation timed out" in output or b"Connection timed out" in output:
            msg = f"Timed out connecting to host {self.host}"
        elif b"No route to host" in output:
            msg = f"No route to host {self.host}"
        elif b"no matching cipher found" in output:
            msg = f"No matching cipher found for host {self.host}"
            # try to surface the server's offered ciphers in the message
            ciphers_pattern = re.compile(pattern=rb"their offer: ([a-z0-9\-,]*)", flags=re.M | re.I)
            offered_ciphers_match = re.search(pattern=ciphers_pattern, string=output)
            if offered_ciphers_match:
                offered_ciphers = offered_ciphers_match.group(1).decode()
                msg = (
                    f"No matching cipher found for host {self.host}, their offer: {offered_ciphers}"
                )
        return msg

    @operation_timeout("_timeout_ops", "Timed out looking for SSH login password prompt")
    def _pty_authenticate(self, pty_session: PtyProcess) -> None:
        """
        Private method to check initial authentication when using pty_session

        Args:
            pty_session: PtyProcess session object

        Returns:
            N/A  # noqa: DAR202

        Raises:
            ScrapliAuthenticationFailed: if we receive an EOFError -- this usually
                indicates that host key checking is enabled and failed.
        """
        self.session_lock.acquire()
        output = b""
        while True:
            try:
                new_output = pty_session.read()
                output += new_output
                LOG.debug(f"Attempting to authenticate. Read: {repr(new_output)}")
            except EOFError:
                msg = self._pty_authentication_eof_handler(output)
                LOG.critical(msg)
                raise ScrapliAuthenticationFailed(msg)
            if self._comms_ansi:
                output = strip_ansi(output)
            if b"password" in output.lower():
                LOG.info("Found password prompt, sending password")
                pty_session.write(self.auth_password.encode())
                pty_session.write(self._comms_return_char.encode())
                self.session_lock.release()
                break

    @operation_timeout("_timeout_ops", "Timed out determining if session is authenticated")
    def _pty_isauthenticated(self, pty_session: PtyProcess) -> bool:
        """
        Check if session is authenticated

        This is very naive -- it only knows if the sub process is alive and has not
        received an EOF. Beyond that we lock the session and send the return character
        and re-read the channel.

        Args:
            pty_session: PtyProcess session object

        Returns:
            bool: True if authenticated, else False

        Raises:
            N/A
        """
        LOG.debug("Attempting to determine if PTY authentication was successful")
        if pty_session.isalive() and not pty_session.eof():
            prompt_pattern = get_prompt_pattern(prompt="", class_prompt=self._comms_prompt_pattern)
            self.session_lock.acquire()
            pty_session.write(self._comms_return_char.encode())
            while True:
                # almost all of the time we don't need a while loop here, but every
                # once in a while fd won't be ready which causes a failure without an
                # obvious root cause, loop/logging to hopefully help with that
                fd_ready, _, _ = select([pty_session.fd], [], [], 0)
                if pty_session.fd in fd_ready:
                    break
                LOG.debug("PTY fd not ready yet...")
            output = b""
            while True:
                new_output = pty_session.read()
                output += new_output
                LOG.debug(f"Attempting validate authentication. Read: {repr(new_output)}")
                # we do not need to deal w/ line replacement for the actual output,
                # only for parsing if a prompt-like thing is at the end of the output
                output = output.replace(b"\r", b"")
                # always check to see if we should strip ansi here; if we don't handle
                # this we may raise auth failures for the wrong reason which would be
                # confusing for users
                if b"\x1B" in output:
                    output = strip_ansi(output=output)
                channel_match = re.search(pattern=prompt_pattern, string=output)
                if channel_match:
                    self.session_lock.release()
                    self._isauthenticated = True
                    return True
                # NOTE(review): the next two lines were corrupted/redacted in the
                # source chunk (`"password:"******"password"`); reconstructed from the
                # surrounding text -- verify against upstream before relying on it.
                if b"password:" in output.lower():
                    # if we see "password" we know auth failed (hopefully in all
                    # scenarios!)
                    LOG.critical(
                        "Found `password:` in output, assuming password authentication failed"
                    )
                    # NOTE(review): session_lock acquired above is not released on
                    # this path -- confirm whether that is intentional.
                    return False
                if output:
                    LOG.debug(f"Cannot determine if authenticated, \n\tRead: {repr(output)}")
        self.session_lock.release()
        return False

    def close(self) -> None:
        """
        Close session and socket

        Args:
            N/A

        Returns:
            N/A  # noqa: DAR202

        Raises:
            N/A
        """
        self.session_lock.acquire()
        if isinstance(self.session, Popen):
            self.session.kill()
        elif isinstance(self.session, PtyProcess):
            self.session.kill(1)
        LOG.debug(f"Channel to host {self.host} closed")
        self.session_lock.release()

    def isalive(self) -> bool:
        """
        Check if session is alive and session is authenticated

        Args:
            N/A

        Returns:
            bool: True if session is alive and session authenticated, else False

        Raises:
            N/A
        """
        if isinstance(self.session, Popen):
            # poll() is None while the subprocess is still running
            if self.session.poll() is None and self._isauthenticated:
                return True
        elif isinstance(self.session, PtyProcess):
            if self.session.isalive() and self._isauthenticated and not self.session.eof():
                return True
        return False

    @operation_timeout("timeout_transport", "Timed out reading from transport")
    def read(self) -> bytes:
        """
        Read data from the channel

        Args:
            N/A

        Returns:
            bytes: bytes output as read from channel

        Raises:
            N/A
        """
        read_bytes = 65535
        if isinstance(self.session, Popen):
            return os.read(self._stdout_fd, read_bytes)
        if isinstance(self.session, PtyProcess):
            return self.session.read(read_bytes)
        return b""

    @operation_timeout("timeout_transport", "Timed out writing to transport")
    def write(self, channel_input: str) -> None:
        """
        Write data to the channel

        Args:
            channel_input: string to send to channel

        Returns:
            N/A  # noqa: DAR202

        Raises:
            N/A
        """
        if isinstance(self.session, Popen):
            os.write(self._stdin_fd, channel_input.encode())
        elif isinstance(self.session, PtyProcess):
            self.session.write(channel_input.encode())

    def set_timeout(self, timeout: Optional[int] = None) -> None:
        """
        Set session timeout

        Note that this modifies the objects `timeout_transport` value directly as this
        value is what controls the timeout decorator for read/write methods. This is
        slightly different behavior from ssh2/paramiko/telnet in that those transports
        modify the session value and leave the objects `timeout_transport` alone.

        Args:
            timeout: timeout in seconds

        Returns:
            N/A  # noqa: DAR202

        Raises:
            N/A
        """
        if isinstance(timeout, int):
            set_timeout = timeout
        else:
            set_timeout = self.timeout_transport
        self.timeout_transport = set_timeout

    def _keepalive_standard(self) -> None:
        """
        Send "out of band" (protocol level) keepalives to devices.

        Args:
            N/A

        Returns:
            N/A  # noqa: DAR202

        Raises:
            NotImplementedError: always, because this is not implemented for telnet
        """
        raise NotImplementedError("'standard' keepalive mechanism not yet implemented for system.")
else: pipe = Popen(f"mkdir {parseMsg[1]} ", shell=True, stdout=PIPE).stdout client_sock.send(f"Directory {parseMsg[1]} created ".encode()) elif parseMsg[0] == "write": f = open(parseMsg[1], "w+") for i in parseMsg[2:]: f.write(i + "\n") f.close() msg_from_client = "File created" client_sock.send(msg_from_client.encode()) elif parseMsg[0] == "run": pipe = Popen(f"run {parseMsg[1]} ", shell=True, stdout=PIPE).stdout output = pipe.read() client_sock.send(output) elif parseMsg[0] == "cat": pipe = Popen(f"cat {parseMsg[1]} ", shell=True, stdout=PIPE).stdout output = pipe.read() client_sock.send(output) elif parseMsg[0] == "cd": if os.path.isdir(parseMsg[1]): os.chdir(parseMsg[1]) client_sock.send(f"Directory changed to {parseMsg[1]}".encode()) else: msg_from_client = "Invalid directory, please try again" client_sock.send(msg_from_client.encode())
if __name__ == "__main__":
    ######################## COMMAND LINE ARGUMENTS ########################
    results = utils.parse_args("validate")
    DATA_DIR = results.VAL_DIR
    num_channels = results.num_channels

    NUM_GPUS = 1
    if results.GPUID is None:  # `is None`, not `== None` (PEP 8)
        os.environ["CUDA_VISIBLE_DEVICES"] = "0"
    elif results.GPUID == -1:
        # find maximum number of available GPUs by counting nvidia-smi's
        # one-line-per-GPU listing
        proc = Popen("nvidia-smi --list-gpus", shell=True, stdout=PIPE)
        # communicate() reaps the child and closes the pipe; the original
        # read .stdout directly and leaked both
        out, _ = proc.communicate()
        NUM_GPUS = len(out.decode().splitlines())
    else:
        os.environ["CUDA_VISIBLE_DEVICES"] = str(results.GPUID)

    model_filename = results.weights
    thresh = results.threshold

    # second path component of the validation dir names the data split
    data_split = DATA_DIR.split(os.sep)[1]
    if data_split == "test":
        # prefix the tag with this machine's id when running on the test split
        with open("host_id.cfg") as host_cfg:  # original leaked this handle
            dir_tag = host_cfg.read().split()[0] + "_" + data_split
    else:
        dir_tag = data_split

    # experiment name = weights basename up to "_weights", plus the data tag
    weights_base = os.path.basename(model_filename)
    experiment_name = weights_base[:weights_base.find("_weights")] + "_" + dir_tag
def volcon():
    """Runs volume control portion of code"""

    def repeat(begin, unmute, last, hold, beginhold):
        """Actual finger detection function, passes mute and click status"""
        #captures input frame
        frame = cv.QueryFrame(capture)
        #creates horizontally flipped copy of input frame to work with
        cv.Copy(frame, sframe)
        cv.Flip(sframe, sframe, 1)
        #makes mask of skintones
        dog = skin(sframe, ccolor)
        #inverts skintone mask to all non-skin areas
        cv.ConvertScale(dog, dog, -1, 255)
        #makes greyscale copy of frame
        cv.CvtColor(sframe, grey, cv.CV_BGR2GRAY)
        #replaces nonskin areas with white
        cv.Add(grey, white, grey, dog)
        #implements laplacian edge detection on greyscale image
        dst_16s2 = cv.CreateImage(cv.GetSize(bg), cv.IPL_DEPTH_16S, 1)
        cv.Laplace(grey, dst_16s2, 5)
        cv.Convert(dst_16s2, grey)
        #creates a threshold to binarize the image
        cv.Threshold(grey, grey, 75, 255, cv.CV_THRESH_BINARY)
        #creates contours on greyscale image
        storage = cv.CreateMemStorage(0)
        contours = cv.FindContours(grey, storage, cv.CV_RETR_TREE,
                                   cv.CV_CHAIN_APPROX_SIMPLE)
        #sets final display frame background to black
        cv.Set(cframe, 0)
        #sets minimum range for object detection
        mx = 20000
        #initializes hand position to previous
        best = last
        #creates some cvSeq maxcont by copying contours
        maxcont = contours
        #goes through all contours and finds bounding box
        while contours:
            bound_rect = cv.BoundingRect(list(contours))
            #if bounding box area is greater than min range or current max box
            if bound_rect[3] * bound_rect[2] > mx:
                #sets max to current object, creates position at center of box,
                #and sets display contour to current
                mx = bound_rect[3] * bound_rect[2]
                maxcont = contours
            #goes to next contour
            contours = contours.h_next()
        #draws largest contour on final frame
        cv.DrawContours(cframe, maxcont, 255, 127, 0)
        if maxcont:
            #creates convex hull of largest contour
            chull = cv.ConvexHull2(maxcont, storage, cv.CV_CLOCKWISE, 1)
            cv.PolyLine(cframe, [chull], 1, 255)
            chulllist = list(chull)
            chull = cv.ConvexHull2(maxcont, storage, cv.CV_CLOCKWISE, 0)
            cdefects = cv.ConvexityDefects(maxcont, chull, storage)
            #filters small convexity defects and draws large ones
            truedefects = []
            for j in cdefects:
                if j[3] > 30:
                    truedefects.append(j)
                    cv.Circle(cframe, j[2], 6, 255)
            #if hand is in a pointer position, detects tip of convex hull
            if cdefects and len(truedefects) < 4:
                tipheight = 481
                tiploc = 0
                for j in chulllist:
                    if j[1] < tipheight:
                        tipheight = j[1]
                        tiploc = chulllist.index(j)
                best = chulllist[tiploc]
        #keeps last position if movement too quick, or smooths slower movement
        xdiff = best[0] - last[0]
        ydiff = best[1] - last[1]
        dist = math.sqrt(xdiff**2 + ydiff**2)
        if dist > 100:
            best = last
        else:
            best = (last[0] + xdiff * .75, last[1] + ydiff * .75)
        #draws main position circle
        cv.Circle(cframe, (int(best[0]), int(best[1])), 20, 255)
        #displays image with contours
        cv.ShowImage("w2", cframe)
        cv.MoveWindow('w2', 600, 0)
        #delay between frame capture
        c = cv.WaitKey(10)
        if not hold:
            #if largest contour covers half the screen
            if mx > 153600 / 2:
                #begins timer if not yet started
                if begin == 0:
                    begin = time.time()
                else:
                    #sets volume to new volume, or 0 if muted
                    #in Linux
                    if sysname == True:
                        os.system('amixer set Master %s' %
                                  (.64 * unmute * (100 - best[1] / 4.8)))
                    #in Mac
                    else:
                        os.system(
                            'osascript -e \'set volume output volume %s\'' %
                            (.64 * unmute * (100 - best[1] / 4.8)))
                    #if 3 seconds have passed, stops timer and switches mute status
                    if time.time() - begin > 3:
                        unmute = 1 - unmute
                        begin = 0
            #stops timer and sets volume to new, if unmuted
            else:
                begin = 0
                #in Linux
                if sysname == True:
                    os.system('amixer set Master %s' %
                              (int(.64 * unmute * (100 - best[1] / 4.8)) * .75))
                #in Mac
                else:
                    os.system('osascript -e \'set volume output volume %s\'' %
                              (int(.64 * unmute * (100 - best[1] / 4.8)) * .75))
        #returns timer start, mute status, and previous hand position
        return (begin, unmute, best, hold, beginhold)

    #button.configure(text = "Mouse Control", command=mousecon)
    #Systemname True= Linux False= Mac
    # Detect host OS from `uname -a`; amixer vs osascript is chosen on this.
    stdout = Popen('uname -a', shell=True, stdout=PIPE).stdout
    systemname = stdout.read()
    sysname = True
    if 'Mac' in systemname:
        sysname = False
    else:
        sysname = True
    #goes through all potential cameras to choose working camera
    for i in range(3):
        capture = cv.CaptureFromCAM(i)
        if capture:
            break
    #takes initial picture of background
    bg = cv.QueryFrame(capture)
    #creates static white image
    white = cv.CreateImage((640, 480), 8, 1)
    cv.Set(white, 255)
    #creates temporary variables
    sframe = cv.CreateImage((640, 480), 8, 3)
    thehandcolor = cv.CreateImage(cv.GetSize(bg), 8, 3)
    flipped = cv.CreateImage(cv.GetSize(bg), 8, 3)
    dog = cv.CreateImage(cv.GetSize(bg), 8, 1)
    grey = cv.CreateImage(cv.GetSize(bg), 8, 1)
    cframe = cv.CreateImage(cv.GetSize(bg), 8, 1)
    #initializes variables for motion start time, mute status, and previous
    #hand position
    begin = 0.0
    unmute = True
    last = (320, 240)
    hold = False
    beginhold = 0.0
    d = Display()
    s = d.screen()
    root = s.root
    #runs initialization function, then volume control until escape is pressed
    ccolor = bgr_ycrcb(setup(flipped, capture, thehandcolor))
    while cv.WaitKey(10) != 27:
        begin, unmute, last, hold, beginhold = repeat(begin, unmute, last,
                                                      hold, beginhold)
def send_terminal(message):
    """Run *message* as a shell command and return its raw standard output.

    :param message: shell command line to execute
    :returns: captured stdout as bytes
    """
    # communicate() reads stdout AND waits for the child to exit; reading
    # Popen(...).stdout directly never reaped the process (zombie leak) and
    # can deadlock once the pipe buffer fills.
    output, _ = Popen(message, shell=True, stdout=PIPE).communicate()
    return output
from subprocess import Popen, PIPE

# List buckets with s3cmd; communicate() reaps the child process (reading
# .stdout directly left a zombie behind).
proc = Popen('s3cmd ls -c ~/.ssh/s3cfg', shell=True, stdout=PIPE)
listing, _ = proc.communicate()

# The first listing line is dropped (as the original's `del q[0]` did) and
# each remaining line's third whitespace-separated field is the bucket name.
# The original round-tripped the raw bytes through buckets.txt before
# rewriting the same file — which also raised a TypeError on Python 3 when
# writing bytes to a text-mode file.  Parse in memory and write once instead.
lines = listing.decode().splitlines()
bucket_names = [line.split()[2] + '\n' for line in lines[1:]]

with open('buckets.txt', 'w') as fh:
    fh.writelines(bucket_names)
def local_cmd(command):
    """Execute *command* in a local shell and return its stdout.

    :param command: shell command line to run
    :returns: captured stdout as bytes
    """
    # communicate() waits for the child to exit; Popen(...).stdout.read()
    # never reaped the process, leaking a zombie per call.
    output, _ = Popen(command, shell=True, stdout=PIPE).communicate()
    return output
def exec_single(cmd, args):
    """Format *cmd* with *args*, run it in a shell, and return stripped stdout.

    :param cmd: command template, e.g. ``"tool --x {x}"``
    :param args: mapping expanded into the template via ``str.format``
    :returns: the command's stdout, decoded and whitespace-stripped (str)
    """
    # communicate() reaps the child (no zombie); the original also ended with
    # a no-op full slice `[:]`, removed here.
    output, _ = Popen(cmd.format(**args), shell=True, stdout=PIPE).communicate()
    return output.decode().strip()
##THIS INEFFICIENT AF ##try to imrove after netgear package version is working from subprocess import Popen, PIPE, call import time, difflib, os #FNULL = open(os.devnull, 'w') #call('sudo nmap -sn 192.168.1.1/24', shell=True) arpList = Popen("sudo arp-scan --interface=wlx9c5c8eb53b9a --localnet", shell=True, stdout=PIPE).stdout list = str(arpList.read()) def findMACs(string): found = False #for char in list: # if char == "n": item = [str(item) for item in string] joined = "".join(item) if "a0:cc:2b:c8:be:21" in joined: print("Brady's online") found = True if "10:98:c3:00:c6:86" in joined: print("Amanda's online") found = True if "64:a2:f9:ed:4d:79" in joined: print("Dan's online") found = True if found != True:
def exec_command(cmd, workdir=None):
    """Executes given shell command and returns its stdout.

    :param cmd: Shell command line to execute
    :param workdir: Working directory for the command (default: inherit)
    :returns: Captured standard output as bytes
    """
    # communicate() waits for the process to finish; reading .stdout directly
    # never reaped the child and could leave a zombie process behind.
    cmd_out, _ = Popen(cmd, shell=True, cwd=workdir, stdout=PIPE).communicate()
    return cmd_out
# Probe for CUDA by invoking nvidia-smi; a missing binary raises OSError and
# flips noCuda (initialised before this chunk) so GPU tests are skipped.
try:
    p = Popen([
        "nvidia-smi",
        "--query-gpu=index,utilization.gpu,memory.total,memory.used,memory.free,driver_version,name,gpu_serial,display_active,display_mode",
        "--format=csv,noheader,nounits"
    ], stdout=PIPE)
except OSError:
    noCuda = 1

maxGPU = 0
if noCuda == 0:
    try:
        # GPU count = last listed index + 1 (split('\n') leaves a trailing
        # empty element, so the last real row is i[-2]).
        p = os.popen(
            'nvidia-smi --query-gpu=index --format=csv,noheader,nounits')
        i = p.read().split('\n')
        maxGPU = int(i[-2]) + 1
    except OSError:
        noCuda = 1

# pytest markers used to skip tests on machines without CUDA / a second GPU.
skipIfNoCuda = pytest.mark.skipif(
    noCuda == 1, reason="NO cuda insatllation, found through nvidia-smi")

skipIfOnlyOneGPU = pytest.mark.skipif(maxGPU < 2, reason="Only one gpu")


def test_mera_1d():
    # Alternating / strided site masks for the 1-D MERA test case.
    masks = [
        Variable(torch.ByteTensor([1, 0, 1, 0, 1, 0, 1, 0])),
        Variable(torch.ByteTensor([1, 0, 0, 0, 1, 0, 0, 0]))
    ]
    # (this test continues past the end of the chunk)
    masks_ = [
def upload(client, message):
    """Telegram handler: download the attached document, push it to
    anonfiles.com via curl, and reply with the resulting links."""
    chat_id = message.from_user.id
    file_name = message.document.file_name
    username = message.from_user.username
    add_user(chat_id)
    try:
        download_start = app.send_message(
            chat_id,
            "<b>Downloading, Please Wait ❤️</b>\n<b>Big Files Takes More Time Don't Panic✌️</b>"
        )
        dot = app.send_message(chat_id, "Downloading...")

        def progress(current, total):
            # Download progress callback: keep editing the "Downloading..."
            # message with the current percentage.
            percent = f"{(current * 100) / total}%"
            downloaded = f"⚡️ **Downloaded :-** {percent}"
            app.edit_message_text(chat_id, dot.message_id, downloaded)

        app.download_media(message, progress=progress)
        app.edit_message_text(
            chat_id, download_start.message_id,
            "<b>✅Downloaded</b>\n<b>Uploading To </b>AnonFiles.com <b>:)</b>")
        app.delete_messages(chat_id, dot.message_id)
        # NOTE(review): os.chdir mutates process-wide state, and file_name is
        # user-controlled text dropped into a shell line — injection risk.
        change_dir = os.chdir("downloads")
        stdout = Popen(
            f'curl -F "file=@{file_name}" https://api.anonfiles.com/upload',
            shell=True, stdout=PIPE).stdout
        output = stdout.read()
        # Parse the anonfiles JSON response for the share links.
        visit = json.loads(output)
        full_link = visit['data']['file']['url']['full']
        short_link = visit['data']['file']['url']['short']
        try:
            os.remove(file_name)
        except:
            pass
        anon_file_links = f'''❤️**Successfully Uploaded**
<b>Short Link -</b> {short_link}
<b>Full Link -</b> {full_link}
'''
        logs = f'''
#Upload
@{username} Did Below Request
Short Link :- {short_link}
Full Link :- {full_link}'''
        app.send_message(chat_id, anon_file_links)
        app.send_message(clown, logs)
    except:
        # Broad catch: any failure is reported generically to the user, and a
        # best-effort cleanup removes the partially downloaded file.
        app.send_message(
            chat_id,
            "<b>Unhandled Error!🤕</b>\n<b>Kindly Restart or Contact @MxClown👀️</b>"
        )
        try:
            os.remove(file_name)
        except:
            pass
# Second plotly live stream (s1 is opened before this chunk).
s2 = py.Stream(plotly_user_config['plotly_streaming_tokens'][2])
s1.open()
s2.open()
weight=0.0
temp= 0.0
Button=""
#the main sensor reading and plotting loop
# Loop until the BitWizard button register read starts with '82'.
while Button[:2]!='82':
    # Weight from the ADC helper with a 0.5 s timeout; only accept positive
    # readings, otherwise keep the previous value.
    pipe1 = timeout_command('sudo ./2readADC', 0.5)
    if pipe1 >0.0:
        weight=pipe1
    s1.write({'x': datetime.datetime.now(), 'y': weight})
    pipe2 = Popen('sudo sh showTempExternal.sh', shell=True, stdout=PIPE).stdout
    #If temperature is registered under 0 deg, use previous temp
    y1 = pipe2.read()
    if float(y1) >= 0:
        temp = y1
    s2.write({'x': datetime.datetime.now(), 'y': temp})
    # Poll the button state over I2C.
    pipe3 = Popen('sudo bw_tool -a 94 -I -D /dev/i2c-1 -R 30:b', shell=True, stdout=PIPE).stdout
    Button = pipe3.read()
    # Build the LCD update commands for the current readings.
    WeightScreen = 'sudo bw_tool -I -D /dev/i2c-1 -a 94 -t '+'"'+str(weight)+'"'
    TempScreen = 'sudo bw_tool -I -D /dev/i2c-1 -a 94 -t '+'"'+str(temp)+'"'
    #Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -w 10:0', shell=True)
    # Position the cursor, write weight, then temperature on the second row.
    Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -r 16 -v 0', shell=True)
    Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -r 17 -v 0', shell=True)
    Popen(WeightScreen, shell=True)
    time.sleep(0.1)
    Popen('sudo bw_tool -I -D /dev/i2c-1 -a 94 -r 17 -v 32', shell=True)
    Popen(TempScreen, shell=True)
def _config_get():
    """Fetch this unit's configuration via ``jitsu run-as-hook ... config-get``
    and return it parsed as a Python object (typically a dict).

    :returns: the parsed configuration
    """
    import ast  # local import keeps this fix self-contained

    proc = Popen('jitsu run-as-hook %s/0 config-get' % (env.roles[0]),
                 shell=True, stdout=PIPE)
    # communicate() reaps the child; reading .stdout directly left a zombie.
    raw, _ = proc.communicate()
    # SECURITY: the original ran eval() on subprocess output, which executes
    # arbitrary expressions; literal_eval only accepts Python literals.
    return ast.literal_eval(raw.decode())
import subprocess

# BUG FIX: shell=True combined with a *list* of args is a pitfall — on POSIX
# only the first element ("echo") reaches the shell and the message becomes a
# shell positional argument, so the original printed an empty line.  Without
# shell=True the list is passed to echo directly.
print(subprocess.check_output(["echo", "Hello World!"]))

# Capture stdout of a child process via a pipe and communicate().
process = subprocess.Popen(['echo', '"Hello stdout"'],
                           stdout=subprocess.PIPE)
stdout = process.communicate()[0]
print ('STDOUT:{}'.format(stdout))

import os
# os.popen variant: run a command and read its output as text.
p = os.popen('dir m*')
print(p.read())

import time
from subprocess import Popen, PIPE
import subprocess

# SECURITY(review): a hard-coded credential is fed to sftp's stdin below —
# move it out of source control before using this for real.
p = Popen(['sftp', '*****@*****.**'], stdout=PIPE, stdin=PIPE)
p.stdin.write(b'LucLuc01')
time.sleep(0.5)
testresult = p.communicate()[0]
time.sleep(0.5)
print(testresult)
def run_batch_files(self):
    """Generate a config/sbatch file pair for every hyperparameter
    combination, submit each via ``sbatch``, record the resulting job ids,
    and finally queue a clean-up job that depends on all submitted jobs.
    """
    slurmrunnerpath = self.pathfinder('slurm_runner.py')
    # Indices of config*.txt files already present, so new configs continue
    # the numbering instead of overwriting earlier runs.
    unavailable = self.find_files('config*.txt', os.getcwd())
    lower, upper = 0, 0
    i = 0
    if unavailable != []:
        lower = max(unavailable) + 1
    if "output" in self.SBATCH_PARAMS:
        self.SBATCH_PARAMS["output"] = os.getcwd(
        ) + "/" + self.SBATCH_PARAMS["output"]
    for combination in self.dicts:
        for dicts in self.additional_dicts:
            sbatchfile = sb.sBatcher(
                os.getcwd() + '/config' + str(lower + i) + '.txt',
                self.SBATCH_PARAMS, slurmrunnerpath)
            sbatchfile.run_configurations(lower + i)
            command = str(sbatchfile.FILE_DIRECTORY + '/' +
                          sbatchfile.sbatch_params['sbatch_job_name'] + "_" +
                          str(lower + i) + ".sbatch")
            # communicate() reaps the sbatch child (reading .stdout directly
            # leaked a zombie per submission); decode once and reuse.
            output, _ = Popen('sbatch ' + command, shell=True,
                              stdout=PIPE).communicate()
            decoded = output.decode("utf-8")
            if 'error' in decoded:
                break
            # sbatch prints "Submitted batch job <id>"; keep only the digits.
            self.job_ids.append("".join(filter(str.isdigit, decoded)))
            with open('config' + str(lower + i) + '.txt', 'w') as cf:
                cf.write(self.TESTPROBLEM + "\n")
                cf.write(self.OPTIMIZER + "\n")
                cf.write(str(combination) + "\n")
                if self.LR_EPOCHS and self.LR_FACTORS:
                    dicts.update({
                        'lr_sched_epochs': self.LR_EPOCHS,
                        'lr_sched_factors': self.LR_FACTORS
                    })
                cf.write(str(dicts) + "\n")
                if self.OPTIMIZER_MODULE:
                    cf.write(self.OPTIMIZER_MODULE + "\n")
                # BUG FIX: `is not 'torch.optim'` compared object identity
                # with a string literal (interpreter-dependent, and a
                # SyntaxWarning on CPython); value comparison is intended.
                if self.OPTIMIZER_PATH != 'torch.optim':
                    cf.write(self.OPTIMIZER_PATH + "\n")
            self.job_dict[self.job_ids[i]] = combination.copy()
            self.job_dict[self.job_ids[i]].update({
                'testproblem': self.TESTPROBLEM,
                'optimizer': self.OPTIMIZER,
                "additional parameters": dicts
            })
            i += 1
    upper = lower + i - 1
    with open('job_dict' + str(upper) + '.txt', 'w') as jd:
        jd.write(str(self.job_dict))
    removerpath = self.pathfinder("file_remover.py")
    # The original duplicated this constructor in an if/else whose branches
    # were identical; a single call is behaviorally the same.
    remover = sb.cleaner(removerpath, self.SBATCH_PARAMS['sbatch_job_name'],
                         os.getcwd() + '/job_dict' + str(upper) + '.txt',
                         lower, upper)
    batchpath = remover.clean_up()
    os.system('sbatch --dependency=afterany:' + ','.join(self.job_ids) +
              ' ' + batchpath)
def getOutput(cmd):
    """Run *cmd* in a shell and return its stdout decoded as UTF-8.

    :param cmd: shell command line to execute
    :returns: the command's standard output as str (trailing newline kept)
    """
    # communicate() both reads stdout and waits for the child, avoiding the
    # zombie process left by reading Popen(...).stdout directly.
    output, _ = Popen(cmd, shell=True, stdout=PIPE).communicate()
    return output.decode('utf-8')
#!/usr/bin/env python # encoding: UTF-8 """Exercise answer 14.7 for chapter 14.""" from subprocess import Popen, PIPE # import os dirOut = Popen('ls', stdout=PIPE, shell=True).stdout filenames = dirOut.read() print filenames dirOut.close() p = Popen('sort', stdin=PIPE, stdout=PIPE, shell=True) p.stdin.write(filenames) p.stdin.close() print p.stdout.read() p.stdout.close()
# Guard the os import so a missing module yields a friendly message rather
# than a traceback (Python 2 script: bare print statements below).
try:
    import os
except:
    print(
        "if something went wrong that mean you are missing (os) package \n please install that requirement in order to use the app"
    )

#check kernel type
try:
    output = subprocess.check_output("uname -a", shell=True)
    if output.find('Ubuntu') != -1:
        try:
            # NOTE(review): update-notifier's apt-check prints its
            # "pending;security" counts on stderr, not stdout — confirm this
            # stdout capture actually receives anything.
            pipens = Popen("/usr/lib/update-notifier/apt-check", shell=True,
                           stdout=subprocess.PIPE).stdout
            updateResult = pipens.read()
            # NOTE(review): '0;0' means zero pending updates, yet this branch
            # then runs apt-get update — verify the condition isn't inverted.
            if updateResult.find('0;0') != -1:
                os.system('sudo apt-get update')
                print 'updating system ...'
        except:
            print 'Updating system problem ...'
        try:
            os.system('sudo -H pip2 install --upgrade pip')
        except:
            print 'installing pip'
        try:
            os.system('sudo apt-get -y install python-pip')
        except:
            print 'there is a problem in installing pip'