def analyze_ensemble(seq,filename,sample_size=100):
    """Sample the Boltzmann structure ensemble of RNA sequence `seq`.

    Writes `seq` to a temporary FASTA file, draws `sample_size` suboptimal
    structures with ViennaRNA's RNAsubopt, re-scores each with RNAeval and
    returns a dict with the sampled energies plus their mean and standard
    deviation.  Temporary files (`<filename>.fa`, `<filename>.st`, RNAfold
    side outputs) are removed before returning.

    NOTE(review): Python 2 style — `map` is used as a list and subprocess
    output is treated as `str`; confirm before running on Python 3.
    NOTE(review): `seq`/`filename` are interpolated into shell commands
    unquoted — unsafe for untrusted input.
    """
    # All tool paths below are relative to the project root.
    chdir(project_dir)
    system("echo '>" + filename + "\n" + str(seq) + "' > " + filename + ".fa")
    # Sample `sample_size` structures; `tail` keeps only the sampled lines.
    output_ss = Popen("./3rdParty/vienna/RNAsubopt -d2 -noLP -s -p "+str(sample_size)+" < " + filename + ".fa | tail -n "+str(sample_size), stdout=PIPE, shell=True).stdout.read()
    l = output_ss.rstrip().split('\n')
    ens_st = []
    string_aux = ""
    # RNAeval input format: alternating sequence / structure lines.
    for st in l:
        string_aux += str(seq) + "\n" + str(st) + "\n"
    system("echo '" + str(string_aux) + "' > " + filename + ".st")
    # The perl one-liner extracts the trailing "(energy)" value per record.
    # NOTE(review): `m/...$//` followed directly by `print` looks malformed
    # perl (missing `and`/`;`) — verify this pipeline actually emits values.
    output_ss = Popen("./3rdParty/vienna/RNAeval -d2 < " + filename + ".st | perl -lne 'm/.* \((.*)\)$// print $1'", stdout=PIPE, shell=True).stdout.read().rstrip()
    # Skip the first two lines of RNAeval output, keep the energies as floats.
    ens_st = map(lambda x: float(x), output_ss.rstrip().split('\n')[2:])
    data = {}
    data['StructureEnsembleSample'] = ens_st
    # `average`/`stddev` are helpers defined elsewhere in this project.
    data['StructureEnsembleSampleMean'] = average(ens_st)
    data['StructureEnsembleSampleSD'] = stddev(ens_st)
    system("rm %s*" % filename) # remove tmp files
    #system("rm %s*" % filename)
    #system("mv " + filename + "* tmp/unafold_files/")
    return data
def get_selection(all_actions):
    """Present *all_actions* in dmenu/rofi and return the chosen Action.

    Exits the process when the menu is dismissed without a selection;
    asserts that exactly one action matches the selection text.
    """
    use_rofi_highlight = CONF.getboolean('dmenu', 'rofi_highlight', fallback=False)
    if use_rofi_highlight is True:
        menu_lines = [str(act) for act in all_actions]
    else:
        # Without rofi's native highlighting, mark active entries textually.
        menu_lines = []
        for act in all_actions:
            marker = '== ' if act.is_active else ' '
            menu_lines.append(marker + str(act))
    active_lines = []
    for idx, act in enumerate(all_actions):
        if act.is_active:
            active_lines.append(idx)
    payload = "\n".join(menu_lines).encode(ENC)
    command = dmenu_cmd(len(menu_lines), active_lines=active_lines)
    proc = Popen(command, stdin=PIPE, stdout=PIPE, env=ENV)
    sel = proc.communicate(input=payload)[0].decode(ENC)
    if not sel.rstrip():
        sys.exit()
    if use_rofi_highlight is False:
        matches = []
        for act in all_actions:
            plain_hit = str(act).strip() == str(sel.strip()) and not act.is_active
            marked_hit = '== ' + str(act) == str(sel.rstrip('\n')) and act.is_active
            if plain_hit or marked_hit:
                matches.append(act)
    else:
        matches = [act for act in all_actions if str(act).strip() == sel.strip()]
    assert len(matches) == 1, \
        u"Selection was ambiguous: '{}'".format(str(sel.strip()))
    return matches[0]
def testRemoveDuplicates(self):
    """Duplicate removal should produce a BAM with the expected line count.

    Runs removeDuplicates + convertToBam on the shared test sample, then
    counts `samtools view -h` lines of the resulting BAM.
    """
    TestConversionTools.sample.bam = BamFile.BamFile(TestConversionTools.testPool, TestConversionTools.sample, TestConversionTools.bamFile)
    self.convTools.removeDuplicates(TestConversionTools.sample)
    self.convTools.convertToBam(TestConversionTools.sample)
    self.assertTrue(os.path.exists(TestConversionTools.expBamOutFile), "output file not created...")
    # Count header + alignment lines of the output BAM via samtools | wc -l.
    output ,error = Popen(Program.config.getPath("samtools") + " view -h " + TestConversionTools.expBamOutFile +" | wc -l", shell=True, stdout=PIPE, stderr=PIPE).communicate()
    # NOTE(review): on Python 3 `output` is bytes, so comparing against the
    # str "1266" would always fail — presumably this suite runs on Python 2.
    self.assertEqual(output.rstrip(), "1266", "number of lines: " + output.rstrip() + " is not " + str(1266))
def get_selection(eths, aps, vpns, gsms, blues, wwan, others):
    """Combine the action lists, show them in dmenu/rofi, return the pick.

    Args:
        eths, aps, vpns, gsms, blues, wwan, others: lists of Action objects.
    Returns:
        The single selected Action.
    Raises:
        SystemExit: when the menu is dismissed with no selection.
        AssertionError: when the selection matches zero or several actions.
    """
    conf = configparser.ConfigParser()
    conf.read(expanduser("~/.config/networkmanager-dmenu/config.ini"))
    rofi_highlight = False
    if conf.has_option('dmenu', 'rofi_highlight'):
        rofi_highlight = conf.getboolean('dmenu', 'rofi_highlight')
    # Blank separator entry between groups; GSM actions are only shown when
    # a WWAN device is present.
    empty_action = [Action('', None)]
    all_actions = []
    all_actions += eths + empty_action if eths else []
    all_actions += aps + empty_action if aps else []
    all_actions += vpns + empty_action if vpns else []
    all_actions += gsms + empty_action if (gsms and wwan) else []
    all_actions += blues + empty_action if blues else []
    all_actions += wwan + empty_action if wwan else []
    all_actions += others
    if rofi_highlight is True:
        inp = [str(action) for action in all_actions]
    else:
        # Mark active entries textually when rofi highlighting is off.
        inp = [('-> ' if action.is_active else ' ') + str(action)
               for action in all_actions]
    active_lines = [
        index for index, action in enumerate(all_actions) if action.is_active
    ]
    # join() takes the list directly — the wrapping [i for i in inp] was
    # a redundant copy.
    inp_bytes = "\n".join(inp).encode(ENC)
    command = dmenu_cmd(len(inp), active_lines=active_lines)
    sel = Popen(command, stdin=PIPE, stdout=PIPE,
                env=ENV).communicate(input=inp_bytes)[0].decode(ENC)
    if not sel.rstrip():
        sys.exit()
    if rofi_highlight is False:
        action = [
            i for i in eths + aps + vpns + gsms + blues + wwan + others
            if ((str(i).strip() == str(sel.strip()) and not i.is_active) or (
                '-> ' + str(i) == str(sel.rstrip('\n')) and i.is_active))
        ]
    else:
        action = [
            i for i in eths + aps + vpns + gsms + blues + wwan + others
            if str(i).strip() == sel.strip()
        ]
    assert len(action) == 1, \
        u"Selection was ambiguous: '{}'".format(str(sel.strip()))
    return action[0]
def get_selection(aps, vpns, gsms, others):
    """Combine the action lists, show them in dmenu/rofi, return the pick.

    Args:
        aps, vpns, gsms, others: lists of Action objects.
    Returns:
        The single selected Action.
    Raises:
        SystemExit: when the menu is dismissed with no selection.
        AssertionError: when the selection matches zero or several actions.
    """
    conf = configparser.ConfigParser()
    conf.read(expanduser("~/.config/networkmanager-dmenu/config.ini"))
    try:
        rofi_highlight = conf.getboolean('dmenu', 'rofi_highlight')
    except (configparser.NoOptionError, configparser.NoSectionError):
        rofi_highlight = False
    # Blank separator entry between the non-empty groups.
    empty_action = [Action('', None)]
    all_actions = []
    all_actions += aps + empty_action if aps else []
    all_actions += vpns + empty_action if vpns else []
    all_actions += gsms + empty_action if gsms else []
    all_actions += others
    if rofi_highlight is True:
        inp = [str(action) for action in all_actions]
    else:
        # Mark active entries textually when rofi highlighting is off.
        inp = [('** ' if action.is_active else ' ') + str(action)
               for action in all_actions]
    active_lines = [index for index, action in enumerate(all_actions)
                    if action.is_active]
    # join() takes the list directly — the wrapping [i for i in inp] was
    # a redundant copy.
    inp_bytes = "\n".join(inp).encode(ENC)
    command = dmenu_cmd(len(inp), active_lines=active_lines)
    sel = Popen(command, stdin=PIPE, stdout=PIPE,
                env=ENV).communicate(input=inp_bytes)[0].decode(ENC)
    if not sel.rstrip():
        sys.exit()
    if rofi_highlight is False:
        action = [i for i in aps + vpns + gsms + others
                  if ((str(i).strip() == str(sel.strip()) and not i.is_active)
                      or ('** ' + str(i) == str(sel.rstrip('\n'))
                          and i.is_active))]
    else:
        action = [i for i in aps + vpns + gsms + others
                  if str(i).strip() == sel.strip()]
    assert len(action) == 1, \
        u"Selection was ambiguous: '{}'".format(str(sel.strip()))
    return action[0]
def publish():
    """POST the workout id and token to the grading endpoint.

    Both values live in root's environment, hence the `sudo printenv`
    subprocess calls rather than os.environ.
    """
    token = Popen(['sudo', 'printenv', 'WORKOUTKEY0'],
                  stdout=PIPE).stdout.read().decode()
    workout_id = Popen(['sudo', 'printenv', 'WORKOUTID'],
                       stdout=PIPE).stdout.read().decode()
    payload = {
        "workout_id": workout_id.rstrip(),
        "token": token.rstrip(),
    }
    requests.post(URL, json=payload)
    log('Publish')
def rename_file (video_file_name_extn, extn):
    """Build a "Show SxxExx Title.ext" name from a video's ffmpeg metadata.

    Extracts show name, season, episode number and title via ffmpeg's
    ffmetadata dump and grep; returns the original name unchanged when the
    file carries no `show=` tag.

    NOTE(review): Python 2 style — subprocess output is treated as `str`
    (`.replace`, `== ''`); on Python 3 these calls would fail on bytes.
    NOTE(review): the filename is interpolated into a shell command; quoting
    covers double quotes only.
    """
    # Extract the meta data into a temporary file "meta.txt"
    Popen ('ffmpeg -i \"' + video_file_name_extn + '\" -f ffmetadata ' + ' meta.txt' , shell=True, stdout=PIPE).stdout.read()
    # Extract the show name
    show_name = Popen ('grep show= meta.txt' , shell=True, stdout=PIPE).stdout.read()
    # Do not rename if it is not a show
    if (show_name == ''):
        system ('rm -rf meta.txt')
        return (video_file_name_extn)
    show_name = show_name.replace ('show=', '')
    show_name = show_name.rstrip()
    # Extract the season number
    season = Popen ('grep season_number meta.txt' , shell=True, stdout=PIPE).stdout.read()
    season = season.replace ('season_number=', '')
    season = season.rstrip()
    # Extract the episode number
    episode_sort = Popen ('grep episode_sort meta.txt' , shell=True, stdout=PIPE).stdout.read()
    episode_sort = episode_sort.replace ('episode_sort=', '')
    episode_sort = episode_sort.rstrip()
    # Extract the episode title
    episode_name = Popen ('grep title= meta.txt' , shell=True, stdout=PIPE).stdout.read()
    episode_name = episode_name.replace ('title=', '')
    episode_name = episode_name.rstrip()
    # Delete temporary "meta.txt" file.
    system ('rm -rf meta.txt')
    # Construct the complete file name
    # NOTE(review): int(season)/int(episode_sort) raise ValueError when the
    # corresponding tags are missing — only `show=` presence is checked.
    final_episode_name = ''
    if (int(season)<10):
        final_episode_name = final_episode_name + show_name + ' S0' + season
    else:
        final_episode_name = final_episode_name + show_name + ' S' + season
    if (int(episode_sort) < 10):
        final_episode_name = final_episode_name + 'E0' + episode_sort
    else:
        final_episode_name = final_episode_name + 'E' + episode_sort
    final_episode_name = final_episode_name + ' ' + episode_name + extn
    # Return the constructed file name
    return final_episode_name
def use_dmenu(prompt, inputs):
    """Run dmenu/rofi over *inputs* and return the stripped selection.

    Raises on an empty input list; exits the process when the user makes
    no selection.
    """
    if len(inputs) == 0:
        raise Exception("Empty input list")
    payload = "\n".join(inputs).encode(ENC)
    proc = Popen(dmenu_cmd(len(inputs), prompt), stdin=PIPE, stdout=PIPE)
    sel = proc.communicate(input=payload)[0].decode(ENC)
    # Some rofi versions return the chosen entry's index instead of its
    # text; map an integer answer back to the entry.
    try:
        sel = inputs[int(sel)]
    except ValueError:
        pass
    if not sel.rstrip():
        sys.exit()
    return sel.rstrip()
def get_revision(self, path_to_repository):
    """Return the git HEAD commit hash of *path_to_repository*.

    A leading "~" is expanded to the user's home directory first.  The
    value is whatever `git rev-parse HEAD` prints, with trailing
    whitespace stripped (bytes on Python 3).
    """
    if path_to_repository.startswith("~"):
        # Expand "~" manually so the path is usable as Popen's cwd.
        path_to_repository = os.path.expanduser("~") + path_to_repository[1:]
    proc = Popen("git rev-parse HEAD", cwd=path_to_repository,
                 stdout=PIPE, shell=True)
    return proc.stdout.read().rstrip()
def _retrWacomDeviceNames(self):
    """
    Get the details of all connected Wacom Devices by using the
    "xsetwacom" command.

    Populates self.devices and the per-kind attributes
    (self._stylusDev / self._eraserDev / self._touchDev) from the
    tab-separated `xsetwacom --list devices` output.  Exits the process
    when xsetwacom is not installed; returns early when no devices are
    reported.
    """
    logging.info("retrieving Devices")
    wacomDevices = []
    try:
        # communicate() returns raw bytes; parsing below stays in bytes.
        output = Popen(["xsetwacom", "--list", "devices"], stdout=PIPE).communicate()[0]
    except FileNotFoundError:
        logging.error("'xsetwacom' command not found. Please install and try again")
        sys.exit(-1)
    if len(output) == 0:
        return
    # Each line: "<name>\t<id>\t<type>" — split into per-device fields.
    for line in output.rstrip().split(b'\n'):
        wacomDevices.append(line.split(b'\t'))
    for entry in wacomDevices:
        # entry[2] carries the device type; entry[0] the device name.
        if 'STYLUS' in entry[2].decode("utf-8"):
            logging.info("stylus device found...")
            self._stylusDev = Stylus(entry[0].rstrip().decode("UTF-8"))
            self.devices.append(self._stylusDev)
        elif 'ERASER' in entry[2].decode("utf-8"):
            logging.info("eraser device found...")
            self._eraserDev = Eraser(entry[0].rstrip().decode("UTF-8"))
            self.devices.append(self._eraserDev)
        elif 'TOUCH' in entry[2].decode("utf-8"):
            logging.info("touch device found...")
            self._touchDev = Touch(entry[0].rstrip().decode("UTF-8"))
            self.devices.append(self._touchDev)
def get_user_selection(self, actions, active_lines):
    """Present *actions* via the configured launcher; return the choice.

    *active_lines* are row indices passed to the launcher's `-a` flag.
    Exits the process when nothing is selected; asserts exactly one
    action matches the selection text.
    """
    enc = locale.getpreferredencoding()
    cmd = self.config.get_launcher_cmd()
    cmd.extend(['-p', 'Network'])
    if active_lines:
        cmd.extend(["-a", ",".join([str(num) for num in active_lines])])
    cmd.append("-lines")
    cmd.append(str(len(actions)))
    payload = "\n".join([str(act) for act in actions]).encode(enc)
    env = os.environ.copy()
    # Force the C locale so launcher output is stable and untranslated.
    env['LC_ALL'] = 'C'
    proc = Popen(cmd, stdin=PIPE, stdout=PIPE, env=env)
    sel = proc.communicate(input=payload)[0].decode(enc)
    if not sel.rstrip():
        sys.exit()
    matched = [act for act in actions if str(act).strip() == sel.strip()]
    assert len(matched) == 1
    return matched[0]
def __init__(self, dist):
    """Count installed packages for distribution *dist*.

    Sets self.key to 'Packages' and self.value to the number of installed
    packages, or 0 when the distribution is unknown or the query fails.
    """
    try:
        if dist == 'Mac OSX':
            p1 = Popen(['brew', 'list', '-1'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'FreeBSD':
            p1 = Popen(['pkg', 'info'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Arch Linux':
            p1 = Popen(['pacman', '-Q'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Fedora' or dist == 'openSUSE project':
            p1 = Popen(['rpm', '-qa'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Ubuntu' or dist == 'Debian' or dist == 'LinuxMint' or dist == 'Raspbian':
            # dpkg also lists removed-but-not-purged packages; drop them.
            p0 = Popen(['dpkg', '--get-selections'], stdout=PIPE)
            p1 = Popen(['grep', '-v', 'deinstall'], stdin=p0.stdout, stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Slackware':
            p1 = Popen(['ls', '/var/log/packages/'], stdout=PIPE).communicate()[0].decode("Utf-8")
        packages = len(p1.rstrip('\n').split('\n'))
    except Exception:
        # Unknown dist leaves p1 unbound (NameError); a missing package
        # manager raises OSError.  Either way report 0 rather than crash.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        packages = 0
    self.key = 'Packages'
    self.value = packages
def testAddMdTag(self):
    """Adding MD tags should produce a BAM with the expected line count.

    Runs addMdTag on the shared test sample, then counts
    `samtools view -h` lines of the resulting BAM.
    """
    TestConversionTools.sample.bam = BamFile.BamFile(
        TestConversionTools.testPool, TestConversionTools.sample,
        TestConversionTools.bamFile)
    self.convTools.addMdTag(TestConversionTools.sample)
    self.assertTrue(os.path.exists(TestConversionTools.expBamOutFile),
                    "output file not created...")
    # Count header + alignment lines of the output BAM via samtools | wc -l.
    output, error = Popen(Program.config.getPath("samtools") + " view -h " +
                          TestConversionTools.expBamOutFile + " | wc -l",
                          shell=True, stdout=PIPE, stderr=PIPE).communicate()
    # NOTE(review): on Python 3 `output` is bytes, so comparing against the
    # str "1283" would always fail — presumably this suite runs on Python 2.
    self.assertEqual(
        output.rstrip(), "1283",
        "number of lines: " + output.rstrip() + " is not " + str(1283))
def packages(self, dist: str) -> 'tuple[str, str]':
    """Count installed packages for distribution *dist*.

    Returns the ('Packages', '<count>') pair — the annotation previously
    claimed `str`, but every path returns a 2-tuple — and additionally
    puts it on self.queue (non-blocking) when a queue is configured.
    On any failure the count is the empty string.
    """
    logger.debug('Getting packages ..')
    try:
        if dist == 'Mac OSX':
            p1 = Popen(['brew', 'list', '-1'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'FreeBSD':
            p1 = Popen(['pkg', 'info'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Arch Linux':
            p1 = Popen(['pacman', '-Q'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Fedora' or dist == 'openSUSE project':
            p1 = Popen(['rpm', '-qa'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Ubuntu' or dist == 'Debian' or dist == 'LinuxMint' or dist == 'Raspbian':
            # dpkg also lists removed-but-not-purged packages; drop them.
            p0 = Popen(['dpkg', '--get-selections'], stdout=PIPE)
            p1 = Popen(['grep', '-v', 'deinstall'], stdin=p0.stdout,
                       stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Slackware':
            p1 = Popen(['ls', '/var/log/packages/'], stdout=PIPE).communicate()[0].decode("Utf-8")
        packages = len(p1.rstrip('\n').split('\n'))
        msg = 'Packages', f'{packages}'
    except Exception as e:
        # Unknown dist leaves p1 unbound (NameError); a missing package
        # manager raises OSError — report an empty count either way.
        logger.error(f'packages: {e}')
        msg = 'Packages', ''
    if self.queue is not None:
        # block=False: a full queue raises rather than stalling this call.
        self.queue.put(msg, False)
    logger.debug('Done packages.')
    return msg
def parse (self, line, output = True):
    """Parse one line of `at` queue output.

    With output=True, matches a full at-record line and returns the tuple
    (job_id, date, time, class_id, user, script, title, dangerous, output);
    with output=False, matches the short "job added" form and returns the
    integer job id.  Returns False when the line does not match (or is a
    comment / too short).

    NOTE(review): the local `output` is rebound midway from the flag to the
    job's output setting returned by get_job_data — intentional but subtle.
    """
    if (output == True):
        # Skip blank/short lines and comments.
        if len (line) > 1 and line[0] != '#':
            m = self.atRecordRegex.match(line)
            if m != None:
                # Time
                time = m.group('time')
                # FreeBSD:
                # We are ignoring timezone and hope everything works
                # out in the end.
                # Date
                day = m.group('day')
                month = m.group ('month')
                # Translate month names to numbers via the lookup table.
                for monthname in self.months:
                    month = month.replace (monthname, self.months[monthname])
                # Zero-pad day and month to two digits.
                if int (day) < 10:
                    day = "0" + day
                if int (month) < 10:
                    month = "0" + month
                date = day + "." + month + "." + m.groups ()[5]
                job_id = m.group ('jobid')
                class_id = m.group ('class')
                user = m.group ('user')
                success, title, desc, manual_poscorrect, output, display, stdlocale = self.get_job_data (int (job_id))
                # manual_poscorrect is only used during preparation of script
                execute = config.getAtbin() + " -c " + job_id
                # read lines and detect starter
                script = Popen(execute, shell = True, env = self.at_env, stdout = PIPE).stdout.read()
                script, dangerous = self.__prepare_script__ (script, manual_poscorrect, output, display, stdlocale)
                #removing ending newlines, but keep one
                #if a date in the past is selected the record is removed by at, this creates an error, and generally if the script is of zero length
                # TODO: complain about it as well
                script = script.rstrip()
                return job_id, date, time, class_id, user, script, title, dangerous, output
    elif (output == False):
        if len (line) > 1 and line[0] != '#':
            m = self.atRecordRegexAdd.search(line)
            #print "Parsing line: " + line
            if m != None:
                #print "Parse successfull, groups: "
                #print m.groups()
                job_id = m.group('jobid')
                return int(job_id)
            else:
                return False
    # Fall-through: unmatched line in either mode.
    return False
def getWindowList(cluster):
    """Return window names of tmux session `session_special` on *cluster*.

    Runs `tmux list-windows` over ssh; returns [] when the session has no
    windows (no "[" in the output, e.g. tmux/session missing remotely).
    """
    out, _ = Popen(['ssh', cluster, 'tmux list-windows -t ' + session_special],
                   stdout=PIPE).communicate()
    text = str(out)
    # print(text)
    if "[" not in text:
        return []
    names = []
    # str(bytes) keeps literal "\n" two-character sequences as separators.
    for row in text.rstrip().split("\\n"):
        if " " not in row:
            continue
        name = row.split(" ")[1]
        # tmux marks the current/last window with a trailing '*' / '-'.
        if name[-1] == '-' or name[-1] == '*':
            name = name[:-1]
        names.append(name)
    return names
def spawn_tars(name, x=0, y=0):
    """Locate the tars_world ROS package and spawn a TARS robot in Gazebo.

    Returns whatever gazebo_spawn_robot returns, or None when rospack
    cannot be run (best-effort: failures are swallowed silently).
    """
    try:
        pkg_path = Popen(['rospack', 'find', 'tars_world'],
                         stdout=PIPE).stdout.read()
        pkg_path = pkg_path.rstrip()
    except Exception:
        # rospack unavailable — give up quietly, matching caller expectations.
        return
    tars_file = pkg_path + '/urdf/tars.xacro'
    controllers = ['joint1_position_controller',
                   'joint2_position_controller',
                   'joint3_position_controller']
    return gazebo_spawn_robot(tars_file, name, controllers, x, y)
def fake():
    """Render the flag page; on POST, report workout completion upstream.

    The DNS suffix, workout id and key live in root's environment, hence
    the `sudo printenv` subprocess calls.
    """
    page_template = 'flag.jinja'
    # Handling the workout complete request:
    if request.method == 'POST':
        dns_suffix = Popen(['sudo', 'printenv', 'DNS_SUFFIX'],
                           stdout=PIPE).stdout.read().decode()
        URL = f'https://buildthewarrior{dns_suffix.rstrip()}/complete'
        workout_id = Popen(['sudo', 'printenv', 'WORKOUTID'],
                           stdout=PIPE).stdout.read().decode()
        workout_key = Popen(['sudo', 'printenv', 'WORKOUTKEY0'],
                            stdout=PIPE).stdout.read().decode()
        status = {
            "workout_id": workout_id.rstrip(),
            "token": workout_key.rstrip(),
        }
        requests.post(URL, json=status)
        print(f'[*] POSTING to {URL} ...')
        return redirect('/totally-not-malware')
    return render_template(page_template)
def check_user_group(username, group):
    """Return True iff *username* belongs to *group* per the `groups` command.

    Returns False for unknown users/groups or when the lookup fails.
    """
    try:
        raw = Popen(["groups", username], stdout=PIPE).communicate()[0]
        # communicate() yields bytes on Python 3; the original applied
        # str.rstrip("\n") directly, which raises TypeError there.
        user_groups = raw.decode().rstrip("\n").replace(" :", "").split(" ")
        return group in user_groups
    except (KeyError, OSError):
        # KeyError kept from the original; OSError added so a missing
        # `groups` binary degrades to False instead of crashing.
        return False
def qsub(command, job_name=None, stdout=None, stderr=None, depend=None, n_cores=None):
    """
    Submit *command* to Sun Grid Engine via qsub and return the job id.

    depend could be either a string or a list (or tuple, etc.) of job
    ids/names this job must wait for (-hold_jid).  job_name/stdout/stderr
    map to qsub's -N/-o/-e flags; n_cores requests the "generic" parallel
    environment.  The job id is parsed from qsub's confirmation line
    ("Your job <id> ...", third whitespace-separated token).

    NOTE: Python 2 code (`print` statement, `basestring`).
    """
    args = ['qsub']
    if n_cores:
        args.extend(['-pe','generic',"%d"%n_cores])
    if job_name:
        args.extend(['-N', job_name])
    if stderr:
        args.extend(['-e', stderr])
    if stdout:
        args.extend(['-o', stdout])
    if depend:
        # in python3, use isinstance(depend, str) instead.
        if not isinstance(depend, basestring):
            depend = ','.join(depend)
        args.extend(['-hold_jid', depend])
    # The job script is fed to qsub on stdin.
    out = Popen(args, stdin=PIPE, stdout=PIPE).communicate(command + '\n')[0]
    print out.rstrip()
    job_id = out.split()[2]
    return job_id
def getWindowList():
    """Return window names of the local tmux session `session_special`.

    Returns [] when tmux is unavailable or the session has no windows
    (no "[" in the command output).
    """
    # Pass the command as a single string: Popen(shell=True) with a *list*
    # only works by accident on POSIX, where the extra list items become
    # the shell's "$0"/"$1" rather than part of the command.
    stdout, stderr = Popen('tmux list-windows -t ' + session_special,
                           stdout=PIPE, shell=True).communicate()
    text = str(stdout)
    if "[" not in text:
        return []
    st = text.rstrip()
    # A decoded str has real newlines; str(bytes) keeps literal "\n" pairs.
    sp = "\n" if "\n" in st else "\\n"
    k = [x.split(" ")[1] for x in st.split(sp) if " " in x]
    # Drop tmux's current('*')/last('-') window markers.
    k = [x[:-1] if (x[-1] == '-' or x[-1] == '*') else x for x in k]
    return k
def transmitFiles(self, subject, addresses, sourceWfo, attachments, xmtScript):
    """Transmit ISC attachment files via the site's MHS transmit script.

    Builds a WMO id from *sourceWfo*, substitutes the %SUBJECT/%ADDRESSES/
    %WMOID/%ATTACHMENTS placeholders in *xmtScript*, runs it through a
    shell, logs success/failure, and removes the attachment files.

    NOTE(review): Python 2 code (`string.replace` module function; subprocess
    output concatenated with str).
    """
    # assembles the command and executes it.
    # determine MHS WMO id for this message
    wmoid = "TTAA00 "
    if sourceWfo in ['SJU']:
        wmoid += "TJSJ"
    elif sourceWfo in ['AFG', 'AJK', 'HFO', 'GUM']:
        wmoid += "P" + sourceWfo
    elif sourceWfo in ['AER', 'ALU']:
        wmoid += "PAFC"
    elif len(sourceWfo) == 3:
        wmoid += "K" + sourceWfo
    elif len(sourceWfo) == 4:
        wmoid += sourceWfo
    else:
        wmoid += "XXXX"
    wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time()))

    # Transmit files - do string substitution
    if xmtScript is not None:
        cmd = copy.deepcopy(xmtScript)
        cmd = string.replace(cmd, "%SUBJECT", subject)
        cmd = string.replace(cmd, "%ADDRESSES", ",".join(addresses))
        cmd = string.replace(cmd, "%WMOID", "'" + wmoid + "'")
        cmd = string.replace(cmd, "%ATTACHMENTS", ",".join(attachments))
        self.logEvent("Transmit: ", cmd)
        import siteConfig
        from subprocess import Popen, PIPE
        output, err = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
        # Success is signalled by the MHS id prefix appearing in stdout.
        if output.find(siteConfig.GFESUITE_MHSID + "-") == -1:
            alertMsg = "ISC Send failed transmission to : " + ",".join(
                addresses) + " --> " + output + " " + err
            self.logProblem(alertMsg)
        else:
            self.logEvent(output.rstrip())
            if len(err) > 0:
                self.logProblem(err.rstrip())
            alertMsg = "ISC data successfully transmitted to: " + ",".join(
                addresses)
            self.logEvent(alertMsg)

        # Clean up the attachment files regardless of transmit outcome.
        for file in attachments:
            try:
                os.remove(file)
            except OSError:
                self.logException("Error removing file: " + file)
def blast_vs_fasta(query, subject):
    """
    Blast `query` against `subject`. Both must be paths to fasta files.

    Returns list of lists, each `[sseqid, qseqid, pident, length]`, or
    None when blastn produced no output.
    """
    out = Popen(
        ['blastn',
         '-query', query,
         '-subject', subject,
         '-outfmt', '10 sseqid qseqid pident length',
         ],
        stdout=PIPE
    ).communicate()[0]
    if out:
        # Flatten the CSV output, then regroup into rows of four fields.
        # The original indexed `[0]` after re.split, keeping only the first
        # *field* and then chunking its characters — see the sibling
        # blast_vs_db for the intended row-of-four behaviour.
        result = re.split(r'[,\n]', out.rstrip())
        return [result[i:i+4] for i in range(len(result))[0::4]]
def delete_connection():
    """Display list of NM connections and delete the selected one.

    Exits the process when nothing is selected; asserts exactly one
    connection matches the selection text.
    """
    deleters = [Action(conn.get_id(), conn.delete) for conn in CONNS]
    menu = "\n".join(str(d) for d in deleters).encode(ENC)
    proc = Popen(dmenu_cmd(len(deleters), "CHOOSE CONNECTION TO DELETE:"),
                 stdin=PIPE, stdout=PIPE, env=ENV)
    sel = proc.communicate(input=menu)[0].decode(ENC)
    if not sel.strip():
        sys.exit()
    chosen = [d for d in deleters if str(d) == sel.rstrip("\n")]
    assert len(chosen) == 1, u"Selection was ambiguous: {}".format(str(sel))
    # Each Action is callable and performs the deletion when invoked.
    chosen[0]()
def blast_vs_db(query, db):
    """
    Blast `query` (fasta file) against `db` (blast db).

    Returns list of lists, each `[qseqid, sseqid, pident, length]`, or
    None when blastn produced no output.
    """
    cmd = ['blastn',
           '-query', query,
           '-db', db,
           '-outfmt', '10 qseqid sseqid pident length',
           ]
    out = Popen(cmd, stdout=PIPE).communicate()[0]
    if not out:
        return None
    # Flatten the CSV output, then regroup into rows of four fields.
    fields = re.split(r'[,\n]', out.rstrip())
    return [fields[i:i + 4] for i in range(0, len(fields), 4)]
def git_infos():
    """Return (HEAD revision, HEAD commit date) of the repo at root_path().

    Falls back to ("-", "-") when git, the repository, or root_path()
    is unavailable.
    """
    try:
        from subprocess import Popen, PIPE
        # cwd should be the root of git repo
        cwd = root_path()
        git_rev_cmd = """git rev-parse HEAD"""
        git_date_cmd = """git show --format="%ci" -s HEAD"""
        rev = Popen([git_rev_cmd], shell=True, stdout=PIPE, cwd=cwd).communicate()[0]
        date = Popen([git_date_cmd], shell=True, stdout=PIPE, cwd=cwd).communicate()[0]
        # communicate() returns bytes on Python 3; the original called
        # str.rstrip("\n") on them, which raised TypeError and silently
        # hit the fallback every time.  Decode before stripping.
        cmd_output = rev.decode().rstrip("\n"), date.decode().rstrip("\n")
    except Exception:
        cmd_output = "-", "-"
    return cmd_output
def get_ids(self):
    """
    Get all ids of the namespaces
    :return: The sorted list of namespace ids, e.g., ['red', 'blue'],
             or None when the command fails or reports no namespaces.
    """
    run_cmd = '%s list' % self.ns_cmd
    spaces, err = Popen(run_cmd, stdout=PIPE, stderr=PIPE,
                        shell=True).communicate()
    if err:
        # Any stderr output is treated as failure.
        error("Failed to run %s, err=%s\n" % (run_cmd, err))
        return None
    if not spaces:  # spaces == ''
        return None
    # NOTE(review): communicate() returns bytes on Python 3, where
    # rstrip('\n') would raise TypeError — presumably this runs on
    # Python 2 (or with universal_newlines); confirm before porting.
    ns_list = spaces.rstrip('\n').split('\n')
    ns_list.sort()
    return ns_list
def _readSettings(self):
    """
    Reads the current settings of the screen device.
    Information is obtained with the help of the "xrandr" command.

    Looks for the LVDS1 output line and records its name and rotation
    state (b'normal' when no recognised rotation is reported).
    """
    raw = Popen(["xrandr", "--current"], stdout=PIPE).communicate()[0]
    for device_line in raw.rstrip().split(b'\n'):
        # Keep only the part before the parenthesised mode list, then
        # split the cleaned-up remainder on spaces.
        tokens = device_line.split(b'(')[0].rstrip().split(b' ')
        if tokens[0] != b'LVDS1':
            continue
        self._name = tokens[0]
        # The last token is the rotation state when one is reported.
        if tokens[-1].decode("utf-8") in self.rotModes:
            self._orientation = tokens[-1]
        else:
            self._orientation = b'normal'
def transmitFiles(self, subject, addresses, sourceWfo, attachments, xmtScript):
    """Transmit ISC attachment files via the site's MHS transmit script.

    Builds a WMO id from *sourceWfo*, substitutes the %SUBJECT/%ADDRESSES/
    %WMOID/%ATTACHMENTS placeholders in *xmtScript*, runs it (split on
    spaces, no shell), logs success/failure, and removes the attachments.

    NOTE(review): Python 2 code (`string.replace` module function; subprocess
    output concatenated with str).
    """
    # assembles the command and executes it.
    # determine MHS WMO id for this message
    wmoid = "TTAA00 "
    if sourceWfo in ['SJU']:
        wmoid += "TJSJ"
    elif sourceWfo in ['AFG', 'AJK', 'HFO', 'GUM']:
        wmoid += "P" + sourceWfo
    elif sourceWfo in ['AER', 'ALU']:
        wmoid += "PAFC"
    elif len(sourceWfo) == 3:
        wmoid += "K" + sourceWfo
    elif len(sourceWfo) == 4:
        wmoid += sourceWfo
    else:
        wmoid += "XXXX"
    wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time()))

    # Transmit files - do string substitution
    if xmtScript is not None:
        cmd = copy.deepcopy(xmtScript)
        cmd = string.replace(cmd, "%SUBJECT", subject)
        cmd = string.replace(cmd, "%ADDRESSES", ",".join(addresses))
        cmd = string.replace(cmd, "%WMOID", "'" + wmoid + "'")
        cmd = string.replace(cmd, "%ATTACHMENTS", ",".join(attachments))
        self.logEvent("Transmit: ", cmd)
        import siteConfig
        from subprocess import Popen,PIPE
        # NOTE(review): naive split(" ") breaks arguments containing spaces
        # (including the quoted WMO id substituted above) — confirm the
        # configured script tolerates this.
        output,err = Popen(cmd.split(" "), stdout=PIPE,stderr=PIPE).communicate()
        # Success is signalled by the MHS id prefix appearing in stdout.
        if output.find(siteConfig.GFESUITE_MHSID+"-") == -1:
            alertMsg = "ISC Send failed transmission to : "+",".join(addresses)+" --> "+output+" "+err
            self.logProblem(alertMsg)
        else:
            self.logEvent(output.rstrip())
            if len(err) > 0:
                self.logProblem(err.rstrip())
            alertMsg="ISC data successfully transmitted to: "+",".join(addresses)
            self.logEvent(alertMsg)

        # Clean up the attachment files; removal errors are ignored here.
        for file in attachments:
            try:
                os.remove(file)
            except OSError:
                pass
def lilypond(self):
    """Render the current scale as sheet music via lilypond-book.

    Writes the generated LilyPond markup to a temp HTML file, runs
    lilypond-book to produce a PNG, fixes its ownership, moves it into
    the web app's public images directory, and renders the result page.
    Intermediate commands/outputs are stashed on the template context `c`
    for debugging.

    NOTE(review): hardcoded absolute paths and a fixed output filename —
    concurrent requests would race on the same temp/graphic files.
    """
    lilypond_music = self.make_lily_scale()
    with open('/var/www/WSGI_ENV/tmp/music.html', 'w') as f:
        f.write(lilypond_music.lower())
    # Generate lilypond html and graphic:
    cmd1 = ["/var/www/WSGI_ENV/bin/lilypond-book", "/var/www/WSGI_ENV/tmp/music.html", "-o", "/var/www/WSGI_ENV/tmp/output"]
    output1 = Popen(cmd1, stdout=PIPE).communicate()[0]
    c.cmd1 = cmd1
    c.p1results = output1
    # Find the graphic:
    cmd2 = ["find", "/var/www/WSGI_ENV/tmp/output/", "-name", "*.png"]
    output2 = Popen(cmd2, stdout=PIPE).communicate()[0]
    c.cmd2 = cmd2
    c.graphic_path = output2.rstrip()
    # Open permissions on graphic:
    cmd3 = ["chown", "rorsini", c.graphic_path]
    output3 = Popen(cmd3, stdout=PIPE).communicate()[0]
    c.cmd3 = cmd3
    c.output3 = output3
    # Move graphic:
    cmd4 = ["mv", c.graphic_path, "/var/www/WSGI_ENV/WSGI_APPS/pybass.com/latest-release/pybass_com/public/images/music_graphic.png"]
    output4 = Popen(cmd4, stdout=PIPE).communicate()[0]
    c.cmd4 = cmd4
    c.output4 = output4
    return render('/main/lilypond.mako')
def CheckQt4Tools(context, tools=["moc", "uic"]):
    """SCons configure check: locate the Qt 4 *tools* and verify they work.

    Registers per-tool builders, finds each binary (QT4DIR, PATH variants,
    or pkg-config), and try-builds a sample input.  Returns True when every
    tool builds successfully, False otherwise.

    NOTE: Python 2 / SCons code (`env.has_key`).
    NOTE(review): the mutable default `tools=["moc", "uic"]` is shared
    across calls; safe here only because it is never mutated.
    """
    context.Message("Checking for Qt 4 tools %s... " % ", ".join(tools))
    env = context.env
    env.SetDefault(QT4_MOCCOM="$QT4_MOC -o $TARGET $SOURCE",
                   QT4_MOCIMPLPREFIX="moc_",
                   QT4_MOCIMPLSUFFIX="$CXXFILESUFFIX",
                   QT4_UICCOM="$QT4_UIC -o $TARGET $SOURCE",
                   QT4_UISUFFIX=".ui",
                   QT4_UICDECLPREFIX="ui_",
                   QT4_UICDECLSUFFIX=".h")
    results = []
    for tool in tools:
        if tool not in qt4tools:
            raise KeyError("Unknown tool %s." % tool)
        tool_var = "QT4_" + tool.upper()
        # Prefer an explicit QT4DIR unless a frameworked (macOS) Qt is used.
        if env.get("QT4DIR") and not env["use_frameworked_qt"]:
            qt_bin_dir = join(env["QT4DIR"], "bin")
        else:
            qt_bin_dir = "/usr/bin"
        # Try the distro-specific binary names in order of preference.
        if not env.has_key(tool_var):
            env[tool_var] = WhereIs(tool + "-qt4", qt_bin_dir) or \
                            WhereIs(tool + "4", qt_bin_dir) or \
                            WhereIs(tool, qt_bin_dir)
        # pkg-config may know the exact tool location; OSError means
        # pkg-config itself is missing and is deliberately ignored.
        if not env["use_frameworked_qt"]:
            try:
                tool_location = Popen(Split("pkg-config --variable=" + tool + "_location QtCore"), stdout=PIPE).communicate()[0]
                tool_location = tool_location.rstrip("\n")
                if exists(tool_location):
                    env[tool_var] = tool_location
            except OSError:
                pass
        builder_method_name = tool.capitalize() + "4"
        env.Append(BUILDERS={builder_method_name: qt4tools[tool][0]})
        # Try-build the sample input; empty output also counts as failure.
        result = context.TryBuild(eval("env.%s" % builder_method_name), qt4tools[tool][1])
        if not result or context.lastTarget.get_contents() == "":
            context.Result("no")
            return False
    context.Result("yes")
    return True
def set_cone(name, x, y):
    """Place (or move) a construction cone named *name* at (x, y) in Gazebo.

    Spawns a new cone model when none exists yet; otherwise resets the
    existing model's pose and zeroes its velocities.

    NOTE: Python 2 code (`print` statement).
    """
    cone = gazebo_get_model_state(name)
    if not cone:
        # Get path to cone model
        try:
            cone_file = Popen(['rospack', 'find', 'tars_world'], stdout=PIPE).stdout.read()
            cone_file = cone_file.rstrip() + "/models/construction_cone/model.sdf"
        except Exception as err:
            print "Unable to add cone. Err: %s" % err
            return
        gazebo_spawn_object(name, cone_file, x, y)
    else:
        # Reposition the existing cone and stop any residual motion.
        cone.pose.position.x = x
        cone.pose.position.y = y
        cone.pose.position.z = 0
        cone.pose.orientation.x = cone.pose.orientation.y = cone.pose.orientation.z = 0
        cone.twist.linear.x = cone.twist.linear.y = cone.twist.linear.z = 0
        cone.twist.angular.x = cone.twist.angular.y = cone.twist.angular.z = 0
        gazebo_set_model_state(name, cone.pose, cone.twist)
def CheckQt4Tools(context, tools = ["moc", "uic"]):
    """SCons configure check: locate the Qt 4 *tools* and verify they work.

    Registers per-tool builders, finds each binary (QT4DIR, PATH variants,
    or pkg-config), and try-builds a sample input.  Returns True when every
    tool builds successfully, False otherwise.

    NOTE: Python 2 / SCons code (`env.has_key`).
    NOTE(review): the mutable default `tools = ["moc", "uic"]` is shared
    across calls; safe here only because it is never mutated.
    """
    context.Message("Checking for Qt 4 tools %s... " % ", ".join(tools))
    env = context.env
    env.SetDefault(
        QT4_MOCCOM = "$QT4_MOC -o $TARGET $SOURCE",
        QT4_MOCIMPLPREFIX = "moc_",
        QT4_MOCIMPLSUFFIX = "$CXXFILESUFFIX",
        QT4_UICCOM = "$QT4_UIC -o $TARGET $SOURCE",
        QT4_UISUFFIX = ".ui",
        QT4_UICDECLPREFIX = "ui_",
        QT4_UICDECLSUFFIX = ".h"
    )
    results = []
    for tool in tools:
        if tool not in qt4tools:
            raise KeyError("Unknown tool %s." % tool)
        tool_var = "QT4_" + tool.upper()
        # Prefer an explicit QT4DIR unless a frameworked (macOS) Qt is used.
        if env.get("QT4DIR") and not env["use_frameworked_qt"]:
            qt_bin_dir = join(env["QT4DIR"], "bin")
        else:
            qt_bin_dir = "/usr/bin"
        # Try the distro-specific binary names in order of preference.
        if not env.has_key(tool_var):
            env[tool_var] = WhereIs(tool + "-qt4", qt_bin_dir) or \
                            WhereIs(tool + "4", qt_bin_dir) or \
                            WhereIs(tool, qt_bin_dir)
        # pkg-config may know the exact tool location; OSError means
        # pkg-config itself is missing and is deliberately ignored.
        if not env["use_frameworked_qt"]:
            try:
                tool_location = Popen(Split("pkg-config --variable=" + tool + "_location QtCore"), stdout = PIPE).communicate()[0]
                tool_location = tool_location.rstrip("\n")
                if exists(tool_location):
                    env[tool_var] = tool_location
            except OSError:
                pass
        builder_method_name = tool.capitalize() + "4"
        env.Append(BUILDERS = { builder_method_name : qt4tools[tool][0] } )
        # Try-build the sample input; empty output also counts as failure.
        result = context.TryBuild(eval("env.%s" % builder_method_name), qt4tools[tool][1])
        if not result or context.lastTarget.get_contents() == "":
            context.Result("no")
            return False
    context.Result("yes")
    return True
def __init__(self, dist):
    """Count installed packages for distribution *dist*.

    Sets self.key to 'Packages' and self.value to the number of installed
    packages, or 0 when the distribution is unknown or the query fails.
    """
    try:
        if dist == 'Mac OSX':
            p1 = Popen(['brew', 'list', '-1'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'FreeBSD':
            p1 = Popen(['pkg', 'info'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Arch':
            p1 = Popen(['pacman', '-Q'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Fedora' or dist == 'openSUSE project':
            p1 = Popen(['rpm', '-qa'], stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Ubuntu' or dist == 'Debian' or dist == 'LinuxMint' or dist == 'Raspbian':
            # dpkg also lists removed-but-not-purged packages; drop them.
            p0 = Popen(['dpkg', '--get-selections'], stdout=PIPE)
            p1 = Popen(['grep', '-v', 'deinstall'], stdin=p0.stdout, stdout=PIPE).communicate()[0].decode("Utf-8")
        elif dist == 'Slackware':
            p1 = Popen(['ls', '/var/log/packages/'], stdout=PIPE).communicate()[0].decode("Utf-8")
        packages = len(p1.rstrip('\n').split('\n'))
    except Exception:
        # Unknown dist leaves p1 unbound (NameError); a missing package
        # manager raises OSError.  Either way report 0 rather than crash.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        packages = 0
    self.key = 'Packages'
    self.value = packages
def analyze_structure_prob(seq,filename,window=50,region=[]):
    """Per-position unpaired probabilities of `seq` via RNAplfold.

    Runs ViennaRNA's RNAplfold (window size `window`, -u 1) on a temporary
    FASTA of `seq`, parses the `<filename>_lunp` output into a
    position -> probability dict, and returns a dict with 'StructureProb'
    (average over `region` positions when given, otherwise over all
    positions) and 'StructureProbList' (the full mapping).  Temporary
    files are removed before returning.

    NOTE(review): mutable default `region=[]` — harmless here since it is
    only read, but fragile.  Python 2 style: subprocess output treated as
    str.  `filename` is interpolated into shell commands unquoted.
    """
    chdir(project_dir)
    structure_pairs = {}
    data = {}
    system("echo '>" + filename + "\n" + str(seq) + "' > " + filename + ".fa")
    fnull = open(devnull, 'w') #this line is necessary to omit output generated by UNAFOLD
    call("3rdParty/vienna/RNAplfold -d2 -noLP -W "+str(window)+" -u 1 < " + filename + ".fa", shell = True, stdout = fnull, stderr = fnull) #code is necessary to omit output generated by RNAplfold
    # Keep only position and probability columns from the _lunp table.
    output_ss = Popen("cat " + filename + "_lunp | awk '{print $1 \"\t\" $2}'", stdout=PIPE, shell=True).stdout.read()
    # Skip the two header lines of the _lunp file.
    l = output_ss.rstrip().split('\n')[2:]
    for p in l:
        pair = p.split()
        if pair[1] != 'NA':
            structure_pairs[pair[0]]=float(pair[1])
        else:
            structure_pairs[pair[0]]='NA'
    if region != []:
        # Average only over the requested positions (1-based keys).
        reg_avg = 0
        for pos in region:
            reg_avg += structure_pairs[str(pos)]
        reg_avg = reg_avg/len(region)
        data['StructureProb'] = reg_avg
    else:
        # NOTE(review): 'NA' entries would break this sum — presumably the
        # full-sequence average is only used when no NA values occur.
        data['StructureProb'] = sum(structure_pairs.values())/len(structure_pairs.keys())
    data['StructureProbList'] = structure_pairs
    # remove tmp files
    system("rm %s*" % filename)
    #system("mv " + filename + "* tmp/unafold_files/")
    fnull.close()
    return data
def get_duration(self):
    """
    Uses ffprobe to get the duration of the media file.

    :return: duration in seconds as a string (trailing newline stripped)
    :raises DurationFetchError: when ffprobe output is not a number
    """
    # Build ffprobe command.
    # NOTE(review): self.media_file is embedded unquoted in a shell
    # string — paths with spaces/metacharacters break (or are run by)
    # the shell; consider shlex.quote.
    command = self.ffprobe \
        + " -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 " \
        + self.media_file
    # Get duration.  stdout.read() returns bytes on Python 3; decode so
    # float() and rstrip("\n") below work (both raised TypeError on bytes).
    duration = Popen(command, shell=True, stdout=PIPE).stdout.read().decode()
    # Check that the duration is a float from ffprobe
    try:
        float(duration)
    except ValueError:
        raise DurationFetchError(
            "Could not get duration, ffprobe output: " + duration)
    # Assuming \n line endings as this should be running on a raspberry pi
    return duration.rstrip("\n")
def blast(request): """ This method runs blast locally on a user uploaded fasta file. it searches the WS260 version of the C. elegans genome with each query and then runs the ouput against its gff annotation file. """ #TODO: Make a management script that downloads a specific version of this # and updates it. data = '' query = '' j = [] if request.method == 'POST': form = BlastForm(request.POST, request.FILES) if request.FILES: query = request.FILES['file_field'].read() with open(settings.FILE_UPLOAD_TEMP_DIR + '/blah.fa', 'w') as f: f.write(query) """ This is handled by TemporaryFileUploadHandler, which writes to disk and is assigned a randomly generated filepath. The uploader is defined in settings.py """ # runs blast on local server blast_out, blast_err = Popen( [ settings.BLAST, '-query', settings.FILE_UPLOAD_TEMP_DIR + '/blah.fa', '-db', settings.BLAST_DB_DIR + '/c_elegans.PRJNA13758.WS260.genomic.fa', '-outfmt', '6', '-max_target_seqs', '1', '-culling_limit', '1', '-num_threads', '4', '-evalue', '0.00005' ], stdout=PIPE, stderr=PIPE, ).communicate() for hit in blast_out.rstrip().split('\n'): hit = hit.split('\t') print hit start = 0 end = 0 if hit[8] > hit[9]: start = hit[9] end = hit[8] else: start = hit[8] end = hit[9] region = hit[1] + ':' + start + '-' + end tabix_out, tabix_err = Popen([ settings.TABIX, settings.TABIX_DB_DIR + '/c_elegans.PRJNA13758.WS260.annotations.sorted.gff2.gz', region ], stdout=PIPE, stderr=PIPE).communicate() for tabix in tabix_out.rstrip().split('\n'): if tabix: tabix = tabix.split('\t') l = [] l.extend(( hit[0], # query name hit[1], # subject name tabix[1], # source i.e. blastx, gene tabix[2], # method, i.e. cds tabix[3], # start position tabix[4], # stop position tabix[8].replace("'", "") # features )) j.append(l) data = json.dumps(j) else: form = BlastForm() context = { 'data': data, 'form': form, } return render(request, 'blast.html', context)
# Interactive path picker: builds a file tree, lets the user choose one of
# the candidate paths via stdin, then prints it relative to the git root.
# NOTE(review): fileTree/pathList and the helpers appendRecursively,
# setReachable, printRecursively, print_matrix are defined outside this view.
appendRecursively(fileTree, pathList)
setReachable(fileTree)
outputs = []
printRecursively(".", fileTree, True)
i = -1
def index():
    # Running counter used to label each menu row as "[0]", "[1]", ...
    global i
    i +=1
    return "[%d]" % i
# NOTE(review): `outputs` is only initialised to [] here — presumably
# printRecursively fills it via a global; verify against that helper.
if not outputs:
    exit(1)
elif len(outputs) > 1:
    # More than one candidate: show a selection table on stderr and read
    # the chosen index from stdin (Python 2 input() evaluates, giving int).
    sys.stderr.write("Velg path:\n")
    print_matrix([["Index", "Path", "Antall filer"]] + map(lambda line: (index(), line[0], line[1]), outputs), sys.stderr)
    try:
        i = input()
    except:
        exit(1)
else:
    # Exactly one candidate: take it without prompting.
    i = 0
# NOTE(review): relies on a 'git root' alias/extension being configured.
git_root = Popen(["git", "root"], stdout=PIPE).communicate()[0]
print("%s/%s" % (git_root.rstrip(), outputs[i][0]))
THRIFT_INCLUDE = os.getenv("THRIFT_INCLUDES_PATH", "") if THRIFT_INCLUDE: cflags.append("-I{0}".format(THRIFT_INCLUDE)) else: try: p = Popen(['pkg-config', '--cflags', 'thrift'], stdout=PIPE) ret = p.wait() except: ret = 1 else: ret = p.wait() if ret != 0: print "Error: Failed to locate thrift include dir and THRIFT_INCLUDES_PATH is not set." sys.exit(1) else: cflags.extend(p.rstrip("/thrift") for p in p.communicate()[0].split()) # Plow includes cflags.extend("-I%s" % p for p in PLOW_INCLUDES) # Fix for GCC issues on Linux opt = get_config_vars("OPT")[0].split() exclude = set(["-Wstrict-prototypes"]) os.environ['OPT'] = " ".join(flag for flag in opt if flag not in exclude) # Check for cython try:
def shasum(path):
    """Return the SHA-1 hex digest of the file at *path*.

    Computed with hashlib instead of shelling out to the external
    ``shasum`` tool: no subprocess dependency, works when the binary is
    absent from PATH, and the result is the same 40-character lowercase
    hex digest ``shasum`` prints.

    :param path: filesystem path of the file to hash.
    :return: 40-character lowercase hex SHA-1 digest string.
    :raises IOError/OSError: if the file cannot be opened.
    """
    import hashlib  # local import keeps this snippet self-contained
    sha1 = hashlib.sha1()
    # Read in fixed-size chunks so large files are never fully in memory.
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(65536), b''):
            sha1.update(chunk)
    return sha1.hexdigest()
# NOTE(review): this chunk begins inside a try: that opens before this view;
# the first line below is the tail of that try-body.
    category, item, opts = process_arguments(parser)
except Exception, e:
    error(str(e))
# Pick find's case-insensitive name test when requested.
namepred = opts.nocase and 'iname' or 'name'
# Build: FIND <pages>/cat<category> -(i)name <item>.<category>.txt
command = '{0} {4}{5}cat{1} -{2} {3}.{1}.txt'.format(FIND, category, namepred, item, MAN_PAGES_PATH, os.sep)
p = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
out, err = p.communicate()
if err:
    error(err)
# One path per output line; drop blanks and trailing line endings.
paths = [ p.rstrip('\r\n') for p in out.split('\n') if p.rstrip('\r\n')]
if paths:
    if opts.all:
        # Open every match in the pager at once.
        args = [PAGER]
        args.extend(paths)
        call(args, shell=True)
    else:
        # Default: open only the first match.
        call([PAGER, paths[0]], shell=True)
else:
    sys.stderr.write('Could not find {0}.\n'.format(item))
    sys.exit(2)
def __init__(self):
    """Count installed pacman packages."""
    # 'pacman -Q' prints one "name version" line per installed package,
    # so the package count is simply the number of output lines.
    listing = Popen(['pacman', '-Q'], stdout=PIPE).communicate()[0]
    package_lines = listing.decode("Utf-8").rstrip('\n').split('\n')
    self.key = 'Packages'
    self.value = len(package_lines)
def marker_detected_callback(self, data):
    """Scan camera.jpg for a barcode/QR code and publish its payload.

    :param data: the subscription message that triggered the callback
        (unused; the image is always read from camera.jpg).
    """
    # zbarimg -q prints only decoded symbols, e.g. "QR-Code:<payload>".
    output = Popen("zbarimg -q camera.jpg", shell=True, stdout=PIPE).communicate()[0]
    output = output.rstrip('\r\n')
    output = output.replace("QR-Code:", "")
    # '<>' was the legacy Python 2 spelling of '!='; same semantics, but
    # '!=' is the preferred form and also valid on Python 3.
    if output != "":
        self.pub.publish(String(output))
# NOTE(review): chunk starts mid-function — this print is the tail of an
# earlier plotting/reporting routine whose definition is outside this view.
    print "Not that many hits between {} and {}, skipping!".format(species_1, species_2)
    # plt.fill_between(x, 0, y, color='#B0384D', alpha=0.5)

def blast_one(query, database, word_size=28):
    """Run a single blastn search and return the best-hit CSV fields.

    :param query: path to the query FASTA file.
    :param database: path/name of the blast database.
    :param word_size: blastn -word_size parameter.
    :return: list of CSV fields (qseqid, sseqid, pident, length, possibly
        across several lines) or None when blastn produced no output.
    """
    # -num_alignments 1 keeps only the top alignment; outfmt 10 is CSV.
    out, err = Popen(
        ['blastn',
        '-query', query,
        '-db', database,
        '-num_alignments', '1',
        '-outfmt', '10 qseqid sseqid pident length',
        '-word_size', str(word_size),
        ],
        stdout=PIPE
        ).communicate()
    if out:
        # Flatten the CSV output: split on both commas and newlines.
        result = re.split(r'[,\n]', out.rstrip())
        return result
    else:
        return None

def all_by_all(species_1, species_2):
    """Parse a pre-computed pairwise blast XML result for two species."""
    # results = fasta_blast(species_1, species_2)
    results = 'pairwise_blast/{}_{}-blast_results.xml'.format(species_1, species_2)
    if results:
        with open(results, 'r') as result_handle:
            blast_records = NCBIXML.parse(result_handle)
            hits_list = []
            for blast_record in blast_records:
                # kv helpers (project module) map the query id back to a
                # species/id pair and its stored Mongo record.
                qsp, qid = kv.fasta_id_parse(blast_record.query)
                query_record = kv.get_mongo_record(qsp, qid)
                # NOTE(review): the loop body continues beyond this chunk.
                for alignment in blast_record.alignments:
from time import strptime, mktime
from subprocess import Popen, PIPE, STDOUT

# grd2point extracts (lon, lat, elevation) points from a GMT topo grid;
# the -R<bounds> region argument is appended to this prefix below.
GRD_CMD = "/data/QCN/GMT/bin/grd2point /data/QCN/GMT/share/topo/topo30.grd -R"
# Trigger (station) coordinates whose elevation we want to look up.
lonTrig = -122.3485
latTrig = 37.96850
# elevation data - usage of GRD_CMD -Rlon_min/lon_max/lat_min/lat_max
# grd2point /data/QCN/GMT/share/topo/topo30.grd -R$lng/$lng2/$lat/$lat2
# > temp.xyz#output
# /data/QCN/GMT/bin/grd2point /data/QCN/GMT/share/topo/topo30.grd -R-75.01/-75.00/40.00/40.01#grd2point: GMT WARNING: (w - x_min) must equal (NX + eps) * x_inc), where NX is an integer and |eps| <= 0.0001.
# grd2point: GMT WARNING: w reset to -75.0083
# grd2point: GMT WARNING: (n - y_min) must equal (NY + eps) * y_inc), where NY is an integer and |eps| <= 0.0001.
# grd2point: GMT WARNING: n reset to 40.0083
# -75.0041666667 40.0041666667 19
# Build a ~0.01-degree bounding box centred on the trigger point.
grdstr = str(lonTrig - 0.005) + "/" + str(lonTrig + 0.005) + "/" + str(latTrig - 0.005) + "/" + str(latTrig + 0.005)
# cc = subprocess.check_output([GRD_CMD, grdstr])
# os.system(GRD_CMD + grdstr)
cc = Popen(GRD_CMD + grdstr, shell=True, stdout=PIPE).communicate()[0]
# Expected stdout is one tab-separated "lon lat elev" line.
vals = cc.rstrip("\n").split("\t")
myElev = 0.0
if len(vals) == 3:
    myElev = float(vals[2])
print myElev
#!/usr/bin/env python
from subprocess import Popen, PIPE
import json
import sys

# Optional JSON document on stdin; empty or absent input means no paths.
inputs = {}
if not sys.stdin.isatty():
    raw = sys.stdin.read()
    if raw:
        inputs = json.loads(raw)

# find -maxdepth 0 tests whether each candidate itself is a directory.
tmpl = "find -P {path} -maxdepth 0 -type d 2> /dev/null"
contents = []
for entry in inputs.get('path', []):
    # NOTE(review): iterates the 'value' element itself — if that is a
    # plain string this walks it character by character; presumably it is
    # a list of path strings. Confirm against the producer of this JSON.
    for candidate in entry['value']:
        out, _ = Popen(tmpl.format(path=candidate), shell=True,
                       stdout=PIPE).communicate()
        for line in out.rstrip().split('\n'):
            # Preserve first-seen order while de-duplicating.
            if line not in contents:
                contents.append(line)
print(json.dumps({'content': [{'value': contents}]}))
def getSACMetadata(zipinname, hostid, latTrig, lonTrig, lvlTrig, lvlType, idQuake, timeQuake, depthKmQuake, latQuake, lonQuake, magQuake):
    """Stamp station and earthquake metadata into a SAC file's header.

    Looks up the station elevation from the GMT topo grid (unless the user
    supplied an explicit elevation via lvlType 4/5), converts the reported
    level to meters, then drives the SAC tool through a shell here-document
    to write the header fields in place.

    :param zipinname: path of the SAC file to modify (rewritten in place).
    :param hostid: station identifier written to kstnm.
    :param latTrig: station latitude.
    :param lonTrig: station longitude.
    :param lvlTrig: station level value, interpreted according to lvlType.
    :param lvlType: 1=floors, 2=meters, 3=feet (above/below surface);
        4=meters, 5=feet above sea level (explicit elevation, no lookup).
    :param idQuake: quake id; quake header fields are only written when > 0.
    :param timeQuake: quake origin time (unused in this function).
    :param depthKmQuake: quake depth in km (written as meters to evdp).
    :param latQuake: quake latitude.
    :param lonQuake: quake longitude.
    :param magQuake: quake magnitude.
    """
    global SAC_CMD, SACSWAP_CMD, GRD_CMD
    # elevation data - usage of GRD_CMD -Rlon_min/lon_max/lat_min/lat_max
    #grd2point /usr/local/gmt/share/topo/topo30.grd -R$lng/$lng2/$lat/$lat2
    #> temp.xyz #output
    #/usr/local/gmt/bin/grd2point /usr/local/gmt/share/topo/topo30.grd -R-75.01/-75.00/40.00/40.01
    #grd2point: gmt WARNING: (w - x_min) must equal (NX + eps) * x_inc), where NX is an integer and |eps| <= 0.0001.
    #grd2point: gmt WARNING: w reset to -75.0083
    #grd2point: gmt WARNING: (n - y_min) must equal (NY + eps) * y_inc), where NY is an integer and |eps| <= 0.0001.
    #grd2point: gmt WARNING: n reset to 40.0083
    #-75.0041666667 40.0041666667 19
    #outputs closest lon/lat point and elevation in meters
    myElev = 0.0
    # lvlType of 4 or 5 means they explicitly put in the elevation, so no need to look up
    if lvlType not in (4, 5):
        # ~0.01-degree bounding box around the station for the grid lookup.
        grdstr = str(lonTrig - .005) + "/" + str(lonTrig + .005) + "/" + str(
            latTrig - .005) + "/" + str(latTrig + .005)
        cc = Popen(GRD_CMD + grdstr, shell=True, stdout=PIPE).communicate()[0]
        # Expected output is one tab-separated "lon lat elev" line.
        vals = cc.rstrip("\n").split("\t")
        if len(vals) == 3:
            myElev = float(vals[2])
    # at this point myElev is either 0 or their estimated elevation in meters based on lat/lng
    #lvlType should be one of:
    #| 1 | Floor (+/- above/below surface) |
    #| 2 | Meters (above/below surface) |
    #| 3 | Feet (above/below surface) |
    #| 4 | Elevation - meters above sea level |
    #| 5 | Elevation - feet above sea level | note 4 & 5 they input actual elevation , so use that
    #
    # we want level in meters, ideally above sea level, but now just convert to meters (1 floor = 3 m)
    myLevel = myElev
    if lvlType == 1:
        myLevel = myElev + (lvlTrig * 3.0)
    elif lvlType == 2:
        myLevel = myElev + lvlTrig
    elif lvlType == 3:
        myLevel = myElev + (lvlTrig * 0.3048)
    elif lvlType == 4:
        myLevel = lvlTrig
    elif lvlType == 5:
        myLevel = lvlTrig * 0.3048
    # sac values to fill in are: stlo, stla, stel (for station)
    # evlo, evla, evdp, mag (for quake)
    # print "\n\nmyLevel = " + str(myLevel) + " meters\n\n"
    # Drive SAC through a shell here-document: read the file, set the
    # station header fields, then (optionally) the quake fields.
    fullcmd = SAC_CMD + " << EOF\n" +\
        "r " + zipinname + "\n" +\
        "chnhdr kstnm " + str(hostid) + "\n" +\
        "chnhdr stlo " + str(lonTrig) + "\n" +\
        "chnhdr stla " + str(latTrig) + "\n" +\
        "chnhdr stel " + str(myLevel) + "\n"
    #if myLevel != 0.0:
    #    fullcmd = fullcmd + "chnhdr stel " + str(myLevel) + "\n"
    if idQuake > 0:
        fullcmd = fullcmd +\
            "chnhdr evlo " + str(lonQuake) + "\n" +\
            "chnhdr evla " + str(latQuake) + "\n" +\
            "chnhdr evdp " + str(1000.0 * depthKmQuake) + "\n" +\
            "chnhdr mag " + str(magQuake) + "\n"
    fullcmd = fullcmd +\
        "chnhdr leven TRUE\n" +\
        "write over \n" +\
        "quit\n" +\
        "EOF\n"
    # debug info
    # print fullcmd
    cc = Popen(fullcmd, shell=True, stdout=PIPE).communicate()[0]
from time import strptime, mktime
from subprocess import Popen, PIPE, STDOUT

# grd2point extracts (lon, lat, elevation) points from a GMT topo grid;
# the -R<bounds> region argument is appended to this prefix below.
GRD_CMD = "/data/QCN/GMT/bin/grd2point /data/QCN/GMT/share/topo/topo30.grd -R"
# Trigger (station) coordinates whose elevation we want to look up.
lonTrig = -122.3485
latTrig = 37.96850
# elevation data - usage of GRD_CMD -Rlon_min/lon_max/lat_min/lat_max
#grd2point /data/QCN/GMT/share/topo/topo30.grd -R$lng/$lng2/$lat/$lat2
#> temp.xyz#output
#/data/QCN/GMT/bin/grd2point /data/QCN/GMT/share/topo/topo30.grd -R-75.01/-75.00/40.00/40.01#grd2point: GMT WARNING: (w - x_min) must equal (NX + eps) * x_inc), where NX is an integer and |eps| <= 0.0001.
#grd2point: GMT WARNING: w reset to -75.0083
#grd2point: GMT WARNING: (n - y_min) must equal (NY + eps) * y_inc), where NY is an integer and |eps| <= 0.0001.
#grd2point: GMT WARNING: n reset to 40.0083
#-75.0041666667 40.0041666667 19
# Build a ~0.01-degree bounding box centred on the trigger point.
grdstr = str(lonTrig - .005) + "/" + str(lonTrig + .005) + "/" + str(latTrig - .005) + "/" + str(latTrig + .005)
#cc = subprocess.check_output([GRD_CMD, grdstr])
#os.system(GRD_CMD + grdstr)
cc = Popen(GRD_CMD + grdstr, shell=True, stdout=PIPE).communicate()[0]
# Expected stdout is one tab-separated "lon lat elev" line.
vals = cc.rstrip("\n").split("\t")
myElev = 0.0
if len(vals) == 3:
    myElev = float(vals[2])
print myElev
greater += "P" #writing print(gt_ref_count, gt_alt_count, gt_maf, gt_lower, gt_upper, greater) from subprocess import Popen, PIPE gt_ref = "C" gt_alt = "G" output, error = Popen(["samtools", "mpileup", "-r", "1:58989-58989", \ "-f", "/projects/ps-gleesonlab3/lil067/references/GRCh37_plus_decoy/hs37d5.fa",\ "-Q 13 -q0 -AB -d50000 ", \ "/projects/ps-gleesonlab6/sperm_mosaicism/Merged_Rady_Bams/6514-Blood.bam"],\ stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate() output = output.decode() items = output.rstrip().split("\t") chrom = items[0] pos = int(items[1]) ref = items[2].upper() depth = int(items[3]) if depth > 0: match = items[4] quality = items[5] count, pos_n, in_base, del_base = translate_bases(ref, depth, match) print(count) print(pos_n) print(in_base) print(del_base) num_ref = count[gt_ref] + count[gt_ref.lower()] num_alt = count[gt_alt] + count[gt_alt.lower()] print(num_ref, num_alt)
def run_tests(cls, config, tests_to_run, test_group_name, output_filename):
    """
    Run a set of tests from this performance suite.
    :param tests_to_run: A list of 5-tuple elements specifying the tests to run. See the
                         'Test Setup' section in config.py.template for more info.
    :param test_group_name: A short string identifier for this test run.
    :param output_filename: The output file where we write results.
    """
    output_dirname = output_filename + "_logs"
    os.makedirs(output_dirname)
    out_file = open(output_filename, 'w')
    num_tests_to_run = len(tests_to_run)
    print("\nNumber of tests to run: %d" % (len(tests_to_run)) )
    print(OUTPUT_DIVIDER_STRING)
    print("Running %d tests in %s.\n" % (num_tests_to_run, test_group_name))
    failed_tests = []
    # Suite-specific setup hook (implemented by subclasses).
    cls.before_run_tests(config, out_file)
    for short_name, main_class_or_script, scale_factor, java_opt_sets, opt_sets in tests_to_run:
        print(OUTPUT_DIVIDER_STRING)
        print("Running test command: '%s' ..." % main_class_or_script)
        stdout_filename = "%s/%s.out" % (output_dirname, short_name)
        stderr_filename = "%s/%s.err" % (output_dirname, short_name)
        # Run a test for all combinations of the OptionSets given
        java_opt_set_arrays = [i.to_array(scale_factor) for i in java_opt_sets]
        opt_set_arrays = [i.to_array(scale_factor) for i in opt_sets]
        for java_opt_list in itertools.product(*java_opt_set_arrays):
            for opt_list in itertools.product(*opt_set_arrays):
                append_config_to_file(stdout_filename, java_opt_list, opt_list)
                append_config_to_file(stderr_filename, java_opt_list, opt_list)
                java_opts_str = " ".join(java_opt_list)
                opt_list_str = " ".join(opt_list)
                java_opts_str += " -Dsparkperf.commitSHA="
                if hasattr(config, 'SPARK_EXECUTOR_URI'):
                    java_opts_str += " -Dspark.executor.uri=" + config.SPARK_EXECUTOR_URI
                cmd = cls.get_spark_submit_cmd(config, main_class_or_script, opt_list, stdout_filename, stderr_filename)
                test_env["SPARK_SUBMIT_OPTS"] = java_opts_str
                print("Running command: %s\n" % cmd)
                # write command to a file and make it executable
                print("CONFIG setting docker_image %s" % config.DOCKER_IMAGE)
                # NOTE(review): appends ('a') rather than truncating, so
                # template.sh accumulates across combinations — confirm.
                with open ( config.SPARKPERF_RUNDIR + 'template.sh', 'a') as f:
                    f.write('#! /bin/bash\n')
                    f.write(cmd)
                os.chmod("template.sh",0775)
                # Run the generated script under pbench for instrumentation.
                pbench_cmd = config.PBENCH_HOME + "pbench-user-benchmark --config=mllib -- " + config.SPARKPERF_RUNDIR + "template.sh"
                Popen(pbench_cmd, shell=True, env=test_env).wait()
                app_name = opt_list[0].lower()
                # Find the OpenShift pod spawned for this test run.
                pods = Popen("oc get pods | grep %s | head -n1 | awk '{print $1;}'" % app_name, shell=True, stdout=PIPE).stdout.read()
                pods = pods.rstrip()
                # Poll the pod phase for up to ~20 minutes (120 * 10s).
                for i in range(120):
                    test_status = Popen("oc get pods %s --template='{{.status.phase}}'" % pods, shell=True, stdout=PIPE).stdout.read()
                    if (test_status=="Succeeded"):
                        print(" test succeeded");
                        break
                    else:
                        print(pods + " test_status " + test_status)
                        time.sleep(10)
                else:
                    # for/else: loop exhausted without a break.
                    print(" timed out waiting for successful run")
                print("\ntest_status " + test_status)
                # Capture the pod logs for the pbench results directory.
                pods = Popen("oc logs -f %s >/var/lib/pbench-agent/log_%s" % (pods,pods), shell=True, stdout=PIPE).stdout.read()
                result_string = cls.process_output(config, short_name, opt_list, stdout_filename, stderr_filename)
                pbmv = Popen("/opt/pbench-agent/util-scripts/pbench-move-results --prefix=pysparkmllib", shell=True, stdout=PIPE).stdout.read()
                try:
                    os.remove(config.SPARKPERF_RUNDIR + "template.sh")
                except:
                    # NOTE(review): returning mid-suite aborts all remaining
                    # tests on a cleanup failure — confirm this is intended.
                    return "could not remove template.sh"
                if "FAILED" in result_string:
                    failed_tests.append(short_name)
                out_file.write(result_string + "\n")
                out_file.flush()
    print("\nFinished running %d tests in %s.\nSee summary in %s" % (num_tests_to_run, test_group_name, output_filename))
    print("\nNumber of failed tests: %d, failed tests: %s" % (len(failed_tests), ",".join(failed_tests)))
    print(OUTPUT_DIVIDER_STRING)
from subprocess import Popen, PIPE
import re
"""
Print usage information about all Octave functions in given dir
"""
parser = argparse.ArgumentParser(description='Print usage information about all Octave functions in given dir')
parser.add_argument("src_dir", help="directory to scan")
args = parser.parse_args()
# Match Octave source files; capture the function name (basename sans .m).
reg = re.compile("^(?P<name>.*)\.m$")
os.chdir(args.src_dir)
usage = {}      # function name -> list of files that reference it
files_num = {}  # function name -> number of referencing files
for filename in listdir('.'):
    if reg.match(filename):
        function_name = reg.search(filename).group('name')
        # grep -l lists the files containing "<name>("; -s suppresses
        # error messages.  NOTE(review): function_name is interpolated
        # unquoted into a shell command, and the trailing " *" glob only
        # works because we chdir'd into src_dir above.
        stdout, stderr = Popen("grep -s -l " + function_name + "\( *", shell=True, stdout=PIPE).communicate()
        filenames = stdout.rstrip('\n').split('\n')
        usage[function_name] = []
        for name in filenames:
            # A function's own defining file does not count as a usage.
            if name != filename:
                usage[function_name].append(name)
        files_num[function_name] = len(usage[function_name])
# Report functions sorted by usage count (ascending), then by name.
for key, value in sorted(files_num.iteritems(), key=lambda (k,v): (v,k)):
    print "* " + key + ": " + str(usage[key])
def getoutput(cmd):
    """Run *cmd* in a shell and return its stdout as text.

    :param cmd: shell command string (executed with ``shell=True``).
    :return: decoded stdout with leading and trailing whitespace removed.
    """
    val = Popen(cmd, shell=True, stdout=PIPE).communicate()[0].decode("utf-8")
    # strip() == rstrip() + lstrip(): one idiomatic call instead of the chain.
    return val.strip()