def main(): """Run the command and decide which exit code to use""" cmd_load_1 = "snmpget -v 1 -c public " + argv[1] + " 1.3.6.1.4.1.2021.10.1.3.1" cmd_load_5 = "snmpget -v 1 -c public " + argv[1] + " 1.3.6.1.4.1.2021.10.1.3.2" cmd_load_15 = "snmpget -v 1 -c public " + argv[1] + " 1.3.6.1.4.1.2021.10.1.3.3" cmd_core = "snmpwalk -v 1 -c public " + argv[1] + ' 1.3.6.1.2.1.25.3.2.1.2 | grep "3.1.3"' cl1 = cmd_load_1.split(" ") cl5 = cmd_load_5.split(" ") cl15 = cmd_load_15.split(" ") l1 = Popen(cl1, stdout=PIPE).communicate()[0].strip() l5 = Popen(cl5, stdout=PIPE).communicate()[0].strip() l15 = Popen(cl15, stdout=PIPE).communicate()[0].strip() cores = Popen([cmd_core], stdout=PIPE, shell=True).communicate()[0] cores = cores.count("\n") # len(cores.splitlines(True)) try: l1 = split(r"STRING:", l1)[1].strip(' "') except: l1 = "FAILED" try: l5 = split(r"STRING:", l5)[1].strip(' "') except: l5 = "FAILED" try: l15 = split(r"STRING:", l15)[1].strip(' "') except: l15 = "FAILED" if not l1 == "FAILED" and not l5 == "FAILED" and not l15 == "FAILED": l1 = float(l1) l5 = float(l5) l15 = float(l15) print "System Load at " + str(l1) + ", " + str(l5) + ", " + str(l15) + " on " + str( cores ) + " cores." + " | " + "'System Load(1)'=" + str(l1) + ";" + str(1 * cores) + ";" + str( 1.4 * cores ) + " 'System Load(5)'=" + str( l5 ) + ";" + str( 1 * cores ) + ";" + str( 1.4 * cores ) + " 'System Load(15)'=" + str( l15 ) + ";" + str( 1 * cores ) + ";" + str( 1.4 * cores ) if l5 > (1.3 * cores): code = 2 elif l5 > (1 * cores): code = 1 else: code = 0 else: print "SNMP Retrieve Failed" code = 3 leave_now(code)
def __check_remote_service(self, remote_host, service_keyword):
    """Report whether `service_keyword` shows up in the process list of
    `remote_host` (queried via ssh as root).

    Returns one of:
      "<keyword>|active_server|up"    - process list returned, keyword found
      "<keyword>|inactive_server|up"  - process list returned, keyword absent
      "<keyword>|inactive_server|down" - no output came back from the host
    """
    ps_cmd = "ssh root@%s 'ps -A' " % remote_host
    ps_output = Popen(ps_cmd, stdout=PIPE, shell=True).stdout.read()
    if not ps_output:
        state = "inactive_server|down"
    elif service_keyword in ps_output:
        state = "active_server|up"
    else:
        state = "inactive_server|up"
    return "%s|%s" % (service_keyword, state)
def append_content_if_needed(path, content):
    """Ensure the file at `path` contains `content`; append it if missing.

    The file is read and rewritten through the project's sudo-capable Popen
    wrapper (note the non-stdlib `capture`/`force_sudo` keywords) so
    root-owned files can be updated. No-op when `content` already occurs in
    the current file content.
    """
    # BUG FIX: the original assigned `user_sudo = True` (typo) in the
    # file-exists branch, leaving `use_sudo` undefined on the very next line.
    use_sudo = True
    if os.path.exists(path):
        # BUG FIX: `communicate` is a method — it was indexed (`communicate[0]`)
        # instead of called.
        current_content = Popen(['cat', path], capture=True,
                                force_sudo=use_sudo).communicate()[0]
        # Already present: nothing to do.
        if current_content.count(content) > 0:
            return
    else:
        current_content = ''

    # Build the desired content in a temporary file, then copy it into place.
    with temporary_file() as fobj:
        if len(current_content):
            if current_content.endswith('\n'):
                new_content = current_content + content
            else:
                new_content = current_content + '\n' + content
        else:
            new_content = content
        fobj.write(new_content)
        fobj.flush()
        # BUG FIX: was `.comunicate[0]` — misspelled and never invoked, so the
        # copy was never waited on.
        Popen(['cp', fobj.path, path], force_sudo=use_sudo).communicate()
def get_escape_version():
    """Return the current ESCAPE version based on ``git describe`` in format:
    version-revision, where version is the last found tag and revision is the
    number of commits following the tag.

    :return: version in format: vmajor.minor.patch[-revision-commit]
    :rtype: str
    """
    # Only match version tag like v2.0.0
    # cmd = "git describe --always --first-parent --tags --match v*"
    git_cmd = "git describe --always --tags --match P*.*"
    with open(os.devnull, 'wb') as null_sink:
        described = Popen(git_cmd, stdout=PIPE, stderr=null_sink,
                          shell=True).communicate()[0].strip()
    if not described:
        # Git is not installed or the command failed: fall back to the
        # version shipped with the package.
        from escape import __version__
        return __version__
    if '.' not in described:
        # No tag defined in the repo: git gave a bare commit hash, so
        # synthesize a version string around it.
        return "2.0.0-%s" % described
    return described
def execKL(klFile): print 'Testing ' + klFile output = Popen('kl ' + klFile, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True).stdout.read() if args.quiet == False: print output errorCount = output.count('KL stack trace:') if errorCount > 0: if args.quiet: print output print '%s Error(s) found!!\n' % errorCount return errorCount > 0# returnCode = 1
def execKL(klFile): print 'Testing ' + klFile output = Popen('kl ' + klFile, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True).stdout.read() if args.quiet == False: print output errorCount = output.count('KL stack trace:') if errorCount > 0: if args.quiet: print output print '%s Error(s) found!!\n' % errorCount return errorCount > 0 # returnCode = 1
def main(): ''' Searches the output of the netstat command for a specific string''' print sys.argv if len(sys.argv) <= 1: printUsage(); return; # get search string searchString = sys.argv[1]; sleepTime = 60; # 1 minute if len(sys.argv) == 3: sleepTime = int(sys.argv[2]); while True: # exec netstat result = Popen("netstat -p tcp", stdout=PIPE, shell=True).stdout.read(); # count occurance of search string print "Search string '" + searchString + "' occurred " + str(result.count(searchString)) + " times in the netstat result @ " + str(dt.datetime.now()) + "."; sleep(sleepTime);
def initProject(self, options):
    """Initialize the project from parsed command-line options.

    Populates self.samples (one tuple of sample names per timepoint) and
    self.timepairs (every sample pair between consecutive timepoints),
    copies the run parameters, then either registers an existing BAM
    directory or validates the assembly/reads directories plus the
    availability of bwa and samtools needed to build the BAMs.
    """
    # --- sample list: either a file (one timepoint per line) or an inline
    # comma-separated string; each timepoint is colon-separated sample names.
    if os.path.exists(options.sample_list):
        for timepoint in open(options.sample_list, 'r'):
            if timepoint[:-1] == '':
                continue
            self.samples.append(tuple(timepoint[:-1].split(':')))
    elif options.sample_list.count(',') > 0:
        for timepoint in options.sample_list.split(','):
            self.samples.append(tuple(timepoint.split(':')))
    else:
        sys.stderr.write('FATAL: Error in extracting samples, please check your input.\n')
        # BUG FIX: execution previously fell through with an empty sample
        # list; abort like every other FATAL path in this method.
        exit(0)

    # init outfile
    self.outfile = options.outfile
    # qvalue cutoff
    self.qvalue = options.qvalue
    # generate timepairs: all sample pairs between consecutive timepoints
    for timepoint1, timepoint2 in zip(self.samples[:-1], self.samples[1:]):
        for sample1 in timepoint1:
            for sample2 in timepoint2:
                self.timepairs.append((sample1, sample2))
    self.num_proc = options.num_proc
    # NOTE: a redundant earlier `if options.quiet: self.quiet = True` was
    # removed; this single assignment is the value that was always kept.
    self.quiet = options.quiet
    self.contig_length = options.contig_length
    self.mapq = options.mapq
    self.align_perc = options.align_perc
    self.min_links = options.min_links

    # You supply either a BAM dir, or an assembly dir + reads dir.
    # If you supply BAMs, the BWA+samtools step is skipped and the BAMs are
    # trusted to be correct (sorted and indexed); otherwise everything needed
    # to generate them is validated here.
    if options.bam_dir and os.path.exists(options.bam_dir):
        self.bam_dir = options.bam_dir
        for sample1, sample2 in self.timepairs:
            # presumably validates/locates the BAM files; the return value
            # was never used — TODO confirm against getBAMFiles
            self.getBAMFiles(sample1, sample2)
    else:
        if options.assembly_dir == None or options.reads_dir == None:
            sys.stderr.write('FATAL: You need to either supply the BAM file directory or the assembly and reads directory.\n')
            exit(0)
        if not os.path.exists(options.assembly_dir):
            sys.stderr.write('FATAL: cannot locate the assembly fasta files directory, you supplied: %s\n' % options.assembly_dir)
            exit(0)
        else:
            self.assembly_dir = options.assembly_dir
        if not os.path.exists(options.reads_dir):
            sys.stderr.write('FATAL: cannot locate the reads directory, you supplied: %s\n' % options.reads_dir)
            exit(0)
        else:
            self.reads_dir = options.reads_dir
        # test files (return values intentionally unused; the helpers appear
        # to exist for their existence checks — TODO confirm)
        for timepoint in self.samples:
            for sample in timepoint:
                self.getReadsFile(sample)
                self.getAssemblyFile(sample)
        # test samtools and bwa: a missing executable yields either empty
        # stderr or a shell 'command not found' message
        bwaTest = Popen(options.bwa, shell=True, stdout=PIPE, stderr=PIPE).stderr.read()
        # BUG FIX: `count('not found') == 1` failed to detect outputs where
        # the phrase occurs more than once; use containment instead.
        if not bwaTest or 'not found' in bwaTest:
            sys.stderr.write("FATAL: BWA not found in path!\n")
            exit(0)
        else:
            self.bwa = options.bwa
        samtoolsTest = Popen(options.samtools, shell=True, stdout=PIPE, stderr=PIPE).stderr.read()
        if not samtoolsTest or 'not found' in samtoolsTest:
            sys.stderr.write("FATAL: samtools not found in path!\n")
            exit(0)
        else:
            self.samtools = options.samtools
import sys colorred = "\033[01;31m%s\033[00m" try: video = sys.argv[1] if not os.path.isfile(video): raise IndexError except IndexError: print colorred % "The file isn't specified, exiting." os._exit(1) from subprocess import Popen, PIPE, call cheker = "ps aux|grep '%s'" % os.path.basename(__file__) chkapp = Popen(cheker, shell=True, bufsize=200, stdout=PIPE).stdout.read() if chkapp.count('python') > 1: print colorred % "One script is already started, exiting." os._exit(1) ################################################################################# import re import gtk import Image, ImageDraw, ImageFont, ImageFilter, ImageStat from pango import AttrList, AttrFallback, SCALE, FontDescription from datetime import timedelta from random import randint try: import cPickle as pickle from cPickle import UnpicklingError
def main(argv=None):
    """Post-process prodigal gene calls for BinGeR.

    Renames every record in the prodigal .faa/.ffn output from its
    '# start # end # strand #' description to 'tag|start-end|strand' and
    writes the final <prefix>.faa and <prefix>.ffn files, removing the
    temporary inputs afterwards.
    """
    # BUG FIX: the old default `argv = sys.argv[1:]` was evaluated once at
    # import time; evaluate at call time instead (backward compatible).
    if argv is None:
        argv = sys.argv[1:]
    parser = OptionParser(usage=USAGE, version="Version: " + __version__)
    # Required arguments
    requiredOptions = OptionGroup(parser, "Required options",
        "These options are required to run BinGeR, and may be supplied in any order.")
    requiredOptions.add_option("-i", "--infile", type="string", metavar="FILE",
        help="Input file with query sequences in multi-fasta format.")
    # BUG FIX: the help string used a backslash-newline inside the literal,
    # embedding raw source indentation into the --help output; normalized.
    requiredOptions.add_option("-p", "--prefix", type="string", metavar="STRING",
        help="Prefix for output files produced by this script. "
             "The files are: <prefix>.gff, <prefix>.faa, and <prefix>.ffn")
    parser.add_option_group(requiredOptions)
    # Optional arguments that need to be supplied if not the same as default
    optOptions = OptionGroup(parser, "Optional parameters",
        "There options are optional, and may be supplied in any order.")
    optOptions.add_option("--prodigal", type="string", default="prodigal", metavar="STRING",
        help="Location of prodigal executable (Hyatt D, et al, Bioinformatics, 2010).")
    parser.add_option_group(optOptions)
    (options, args) = parser.parse_args(argv)

    # parser.error() raises SystemExit itself, so no exit() follows it.
    if options.infile is None:
        parser.error("An infile is required!")
    if options.prefix is None:
        parser.error("An output prefix is required to supply!")

    # test prodigal: a missing executable yields empty stderr or a shell
    # 'not found' message.
    prodigal_test = Popen(options.prodigal, shell=True, stdout=PIPE, stderr=PIPE).stderr.read()
    # BUG FIX: `count('not found') == 1` missed messages containing the
    # phrase more than once; use containment.
    if not prodigal_test or 'not found' in prodigal_test:
        sys.stderr.write("FATAL: Prodigal not found in path!\n")
        exit(0)

    tempfaa = options.prefix + '.faa.tmp'
    tempffn = options.prefix + '.ffn.tmp'
    faa = options.prefix + '.faa'
    ffn = options.prefix + '.ffn'
    gff = options.prefix + '.gff'

    # run prodigal first
    # NOTE(review): this call is commented out in the original, so the .tmp
    # files must already exist before main() runs — confirm whether it
    # should be re-enabled.
    # call([options.prodigal, "-a", tempfaa, "-d", tempffn, "-f", "gff",
    #       "-o", gff, "-p", "meta", "-i", options.infile],
    #      stdout = PIPE, stderr = PIPE)

    _rewrite_records(tempfaa, faa)
    _rewrite_records(tempffn, ffn)
    os.remove(tempfaa)
    os.remove(tempffn)


def _rewrite_records(temp_path, out_path):
    """Rewrite one prodigal fasta: rename each record from its
    'tag_N # start # end # strand # ...' description to
    'tag|start-end|strand' and write it to out_path.

    (Extracted: the original duplicated this loop verbatim for .faa and .ffn.)
    """
    # Raw string fixes the invalid escapes (\_, \#, \-) of the original
    # pattern; the characters matched are identical.
    pattern = re.compile(r'(.+)_\d+\s#\s(\d+)\s#\s(\d+)\s#\s(-?\d+)\s#\s')
    ofh = open(out_path, 'w')
    for record in SeqIO.parse(temp_path, 'fasta'):
        tag, start, end, strand = pattern.search(record.description).group(1, 2, 3, 4)
        new_name = tag + '|' + start + '-' + end + '|' + strand
        ofh.write('>%s\n%s\n' % (new_name, record.seq))
    ofh.close()
def get_running_workers(self):
    """Count this user's running workers.

    Runs self.WORKER_CMD through the shell and returns how many times
    self.USERNAME occurs in its combined stdout/stderr.
    """
    proc = Popen(self.WORKER_CMD, shell=True, stdin=PIPE,
                 stdout=PIPE, stderr=STDOUT)
    captured = proc.communicate()[0]
    return captured.count(self.USERNAME)
def domain_exists(domain):
    """Check whether libvirt knows a domain named `domain`.

    Runs `virsh list --name --all` and reports whether the name occurs
    anywhere in the listing (substring match, so a partial name can match).
    """
    virsh_out = Popen("virsh list --name --all",
                      shell=True, stdout=PIPE).stdout.read()
    return domain in virsh_out
def main(argv=None):
    """Ortholog tree test driver.

    Partitions the input fasta by ortholog id into 100 temp directories
    (one .ffn file per ortholog), runs the per-ortholog test (MUSCLE +
    FastTree via ind_test) in parallel, applies Benjamini-Hochberg FDR
    correction to the p-values, and writes a TSV of results to the outfile.
    """
    # BUG FIX: the old default `argv = sys.argv[1:]` was bound once at import
    # time; evaluate at call time instead (backward compatible).
    if argv is None:
        argv = sys.argv[1:]
    parser = OptionParser(usage=USAGE, version="Version: " + __version__)
    reqOpts = OptionGroup(parser, "Required Options",
        "These options are required, you may supply them in any order.")
    reqOpts.add_option('-i', '--infile', type="string", metavar="FILE",
        help="The input fasta file.")
    # BUG FIX: help text typo "The outpu tfile".
    reqOpts.add_option('-o', '--outfile', type="string", metavar="FILE",
        help="The output file")
    parser.add_option_group(reqOpts)
    optOpts = OptionGroup(parser, "Optional Options",
        "These options are optional, you may supply them in any order.")
    # BUG FIX: the help promised [default: 1] but no default was set, so
    # num_proc was None and mp.Pool(None) used every core; set it explicitly.
    optOpts.add_option('-t', '--num_proc', type="int", metavar="INT", default=1,
        help="Number of processors available [default: 1]")
    optOpts.add_option('-m', '--muscle', type="string", metavar="STRING", default='muscle',
        help="the location of MUSCLE executable [default: muscle]")
    optOpts.add_option('-f', '--fasttree', type="string", metavar="STRING", default='fasttree',
        help="the location of FASTTREE executable [default: FastTree]")
    parser.add_option_group(optOpts)
    (options, args) = parser.parse_args(argv)
    # parser.error() raises SystemExit itself; no trailing exit() needed.
    if options.infile is None:
        parser.error("Input fasta file must be supplied!")
    if options.outfile is None:
        parser.error("Outfile must be supplied!")

    # test muscle and fasttree: a missing executable yields empty stderr or
    # a shell 'not found' message.
    muscleTest = Popen(options.muscle, shell=True, stdout=PIPE, stderr=PIPE).stderr.read()
    # BUG FIX: `count('not found') == 1` missed outputs with the phrase
    # occurring more than once; use containment.
    if not muscleTest or 'not found' in muscleTest:
        sys.stderr.write("FATAL: MUSCLE not found in path!\n")
        exit(0)
    else:
        muscle = options.muscle
    fasttreeTest = Popen(options.fasttree, shell=True, stdout=PIPE, stderr=PIPE).stderr.read()
    if not fasttreeTest or 'not found' in fasttreeTest:
        sys.stderr.write("FATAL: FastTree not found in path!\n")
        exit(0)
    else:
        fasttree = options.fasttree

    num_proc = options.num_proc
    infile = options.infile
    outfile = options.outfile

    ## start analysis
    # kickstart
    sys.stdout.write("ortholog_tree_test started at %s\n" % (ctime()))
    sys.stdout.flush()

    # partition the records into 100 temp directories, one .ffn per ortholog
    # (records are assumed to be grouped by ortholog id in the input —
    # TODO confirm; a regrouped id would truncate its earlier file)
    tempdirs = []
    for i in range(100):
        tempdirs.append(tempfile.mkdtemp())
    ortho_index = 0
    ffh = None
    old_ortho_id = ''
    for record in SeqIO.parse(infile, 'fasta'):
        ortholog_id = record.id.split('|')[-1]
        if ortholog_id != old_ortho_id:
            old_ortho_id = ortholog_id
            ortho_index += 1
            index = ortho_index % 100
            ffn_file = tempdirs[index] + '/' + ortholog_id + '.ffn'
            if ffh != None:
                ffh.close()
            ffh = open(ffn_file, 'w')
        ffh.write('>%s\n%s\n' % (record.id, record.seq))
    # BUG FIX: the unconditional final close crashed (None has no close)
    # when the input fasta was empty.
    if ffh != None:
        ffh.close()
    sys.stdout.write('Done partitioning input file!\n')

    all_files = []
    for tmpdir in tempdirs:
        all_files += glob.glob(tmpdir + '/*.ffn')
    ffn_files_pars = partition_list(all_files, 100)

    p_vals = []
    for k, ffn_files in enumerate(ffn_files_pars):
        sys.stdout.write('Working on %i/%i orthologs...\n'
                         % ((k + 1) * len(ffn_files), len(all_files)))
        cmds = [[ffn, muscle, fasttree] for ffn in ffn_files]
        pool = mp.Pool(num_proc)
        x = pool.map_async(ind_test, cmds)
        pool.close()
        pool.join()
        for r in x.get():
            p_vals += r

    # FDR Correction using Benjamini-Hochberg approach
    q_vals = BH_correction(p_vals)

    # output results
    ofh = open(outfile, 'w')
    ofh.write('#ortholog_id\tcategory_1\tcount_1\tcategory_2\tcount_2\tpval\tadj_pval(FDR_BH)\n')
    # (loop variable renamed: the original shadowed `q_val` while unpacking)
    for entry in q_vals:
        ortholog_id, category_1, count_1, category_2, count_2, p_val, q_val = entry
        ofh.write('%s\t%s\t%i\t%s\t%i\t%.4f\t%.4f\n' %
                  (ortholog_id, category_1, count_1, category_2, count_2, p_val, q_val))
    ofh.close()

    # clean up temporary directories
    for tmpdir in tempdirs:
        shutil.rmtree(tmpdir)

    # end
    sys.stdout.write("ortholog_tree_test finished at %s\n" % (ctime()))
    sys.stdout.flush()