def main(): try: parser = OptionParser() parser.add_option('-o', '--outdir', help='output directory', dest='outdir') parser.add_option('-f', '--file', help='file', dest='file') parser.add_option('-u', '--username', help='username', dest='username') parser.add_option('-c', '--config', help='config parameters section', dest='config') (options, args) = parser.parse_args() except: parser.print_help() print "Fail Try\n" print "for help use --help" sys.exit(2) FILE = options.file USERNAME = options.username OUTDIR = options.outdir CONFIG = options.config print CONFIG f = funcs() config = getConfig(CONFIG) md5sum = stepMD5Sum(config['url'], f) calcsum = md5sum.calcMD5Sum(FILE, OUTDIR) if calcsum == "none,none" or calcsum == "none": md5sum.runDBMD5SumUpdate(OUTDIR, FILE, "NULL") else: md5sum.runDBMD5SumUpdate(OUTDIR, FILE, calcsum)
def main(): try: parser = OptionParser() parser.add_option('-r', '--runparamsid', help='runparamsid', dest='runparamsid') parser.add_option('-w', '--wkey', help='wkey', dest='wkey') parser.add_option('-i', '--insertreport', help='insert report table', dest='insertreport') parser.add_option('-f', '--func', help='function', dest='func') parser.add_option('-u', '--username', help='username', dest='username') parser.add_option('-o', '--outdir', help='output directory', dest='outdir') parser.add_option('-c', '--config', help='config', dest='config') (options, args) = parser.parse_args() except: parser.print_help() print "for help use --help" sys.exit(2) RUNPARAMSID = options.runparamsid INSERTREPORT = options.insertreport FUNC = options.func WKEY = options.wkey OUTDIR = options.outdir USERNAME = options.username CONFIG = options.config f = funcs() config = getConfig(CONFIG) print config['url'] dbcon = dbcomm(config['url'], f) if (FUNC == "insertreport"): dbcon.insertReportTable(INSERTREPORT) elif (FUNC == "insertJobStats"): jobnums = dbcon.getJobNums(WKEY) for job in jobnums: dbcon.insertJobStats(USERNAME, WKEY, job['job_num'], OUTDIR) sys.exit(0)
def main(): try: parser = OptionParser() parser.add_option('-u', '--username', help='username', dest='username') parser.add_option('-c', '--config', help='config parameters section', dest='config') (options, args) = parser.parse_args() except: parser.print_help() print "for help use --help" sys.exit(2) USERNAME = options.username CONFIG = options.config f = funcs() config = getConfig(CONFIG) backup = stepBackup(config['url'], f) try: if (CONFIG == None): print "for help use --help" sys.exit(2) print USERNAME amazon = backup.getAmazonCredentials(USERNAME) filelist = backup.getAmazonBackupList() for file_info in filelist: file_id = file_info['id'] filename = file_info['file_name'] print filename amazon_bucket = file_info['s3bucket'] amazon_bucket = str(re.sub('s3://', '', amazon_bucket)) backup.processUpload(amazon, amazon_bucket, file_id, filename) except Exception, ex: backup.stop_err('Error (line:%s)running stepBackupS3.py\n%s' % (format(sys.exc_info()[-1].tb_lineno), str(ex)))
def main():
    """Download one file from S3 using the user's stored Amazon credentials."""
    helper = funcs()
    # Command-line interface: input file, output path, user, and config section.
    parser = OptionParser()
    for short_flag, long_flag, dest_name in (("-i", "--inputfile", "filename"),
                                             ("-o", "--outputfile", "output"),
                                             ("-u", "--username", "username"),
                                             ("-c", "--config", "config")):
        parser.add_option(short_flag, long_flag, dest=dest_name)
    options, args = parser.parse_args()
    FILE = options.filename
    OUTPUT = options.output
    USERNAME = options.username
    CONFIG = options.config
    config = getConfig(CONFIG)
    boto = botoDownload(config['url'], helper)
    # Fetch the per-user AWS credentials, then perform the download.
    amazon = boto.getAmazonCredentials(USERNAME)
    boto.downloadFile(amazon, FILE, OUTPUT)
    sys.exit(0)
def main(): try: parser = OptionParser() parser.add_option('-b', '--barcode', help='barcode', dest='barcode') parser.add_option('-j', '--jobsubmit', help='jobsubmit', dest='jobsubmit') parser.add_option('-r', '--runparamsid', help='group id', dest='runparamsid') parser.add_option('-u', '--username', help='username', dest='username') parser.add_option('-p', '--pairedend', help='pairedend', dest='paired') parser.add_option('-a', '--amazonupload', help='amazonupload', dest='amazonupload') parser.add_option('-o', '--outdir', help='output directory', dest='outdir') parser.add_option('-c', '--config', help='config parameters section', dest='config') (options, args) = parser.parse_args() except: parser.print_help() print "for help use --help" sys.exit(2) BARCODE = options.barcode PAIRED = options.paired USERNAME = options.username RUNPARAMSID = options.runparamsid JOBSUBMIT = options.jobsubmit OUTDIR = options.outdir AMAZONUPLOAD = options.amazonupload CONFIG = options.config f = funcs() config = getConfig(CONFIG) backup = stepBackup(config['url'], f) try: if (OUTDIR == None or JOBSUBMIT == None): print "for help use --help" sys.exit(2) print BARCODE print PAIRED print JOBSUBMIT print OUTDIR print USERNAME amazon = backup.getAmazonCredentials(USERNAME) samplelist=backup.getSampleList(RUNPARAMSID, BARCODE) inputdir=OUTDIR+"/input" tablename="ngs_temp_sample_files" if (BARCODE != "NONE"): inputdir=OUTDIR+"/seqmapping/barcode" tablename="ngs_temp_lane_files" processedLibs=[] amazon_bucket="" for sample in samplelist: sample_id=sample['sample_id'] file_id=sample['file_id'] libname=sample['samplename'] filename=sample['file_name'] backup_dir=sample['backup_dir'] amazon_bucket=sample['amazon_bucket'] dir_id=sample['dir_id'] PAIRED=None if (filename.find(',')!=-1): PAIRED="Yes" backup.updateInitialFileCounts(file_id, tablename, inputdir, filename, PAIRED, dir_id) if (not [libname, sample_id] in processedLibs): backup.processFastqFiles(sample, PAIRED) 
processedLibs.append([libname, sample_id]) print processedLibs if (AMAZONUPLOAD.lower() != "no" and amazon!=() and amazon_bucket!=""): amazon_bucket = str(re.sub('s3://', '', amazon_bucket)) for libname, sample_id in processedLibs: print libname + ":" + str(sample_id) if (backup.checkReadCounts(sample_id, tablename) and amazon): if (filename.find(',')!=-1): files=filename.split(',') backup.uploadFile(amazon, amazon_bucket, backup_dir, libname+'.1.fastq.gz' ) backup.uploadFile(amazon, amazon_bucket, backup_dir, libname+'.2.fastq.gz' ) else: backup.uploadFile(amazon, amazon_bucket, backup_dir, libname+'.fastq.gz' ) else: print "ERROR 86: The # of read counts doesn't match: %s",libname sys.exit(86) except Exception, ex: backup.stop_err('Error (line:%s)running stepBackupS3.py\n%s'%(format(sys.exc_info()[-1].tb_lineno), str(ex)))
def factors():
    """Prompt for an integer and print every factor of it."""
    i = 1
    n = int(input('enter the no. of which you want to find the factors : '))
    print('The factors of the given no. ', n, ' are : ')
    while i <= n:
        if n % i == 0:
            print(i)
        i += 1


#ex.6
def palin():
    """Prompt for a string and report whether it is a palindrome.

    FIX: the original looped `while i != j` and executed `continue` before
    updating the indices — an infinite loop whenever the first pair of
    characters matched, and non-termination for even-length strings (i and
    j cross without ever being equal).  Compare pairs inward and report once.
    """
    n = str(input('Enter a string to check whether it is a palindrome or not : '))
    i = 0
    j = len(n) - 1
    while i < j:
        if n[i] != n[j]:
            print('Not a palindrome string...')
            return
        i += 1
        j -= 1
    print('palindrome string')


# NOTE(review): funcs() is not defined in this chunk — presumably defined
# elsewhere in the original file; verify before running standalone.
a = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
b = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
funcs(a, b)
def main(): try: parser = OptionParser() parser.add_option('-u', '--username', help='defined user in the cluster', dest='username') parser.add_option('-k', '--key', help='defined key for the workflow', dest='wkey') parser.add_option('-s', '--servicename', help='service name', dest='servicename') parser.add_option('-c', '--command', help='command that is goinf to be run', dest='com') parser.add_option('-n', '--name', help='name of the run', dest='name') parser.add_option('-o', '--outdir', help='output directory', dest='outdir') parser.add_option('-f', '--config', help='configuration parameter section', dest='config') parser.add_option('-r', '--force', help='force subission', dest='force') (options, args) = parser.parse_args() except: print "OptionParser Error:for help use --help" sys.exit(2) USERNAME = options.username WKEY = options.wkey OUTDIR = options.outdir SERVICENAME = options.servicename NAME = options.name COM = options.com CONFIG = options.config FORCE = (options.force if (options.force) else "no") python = "module load python/2.7.5 && python" config = getConfig(CONFIG) f = funcs() submitjobs = submitJobs(config['url'], f) submitCommand = f.getCommand(sys.argv) exec_dir = os.path.dirname(os.path.abspath(__file__)) #print "EXECDIR" + exec_dir sdir = config['tooldir'] + "/src" track = OUTDIR + "/tmp/track" src = OUTDIR + "/tmp/src" logs = OUTDIR + "/tmp/logs" if (NAME == None): NAME = "job" success_file = track + "/" + str(NAME) + ".success" if (not os.path.exists(success_file) or FORCE != "no"): os.system("mkdir -p " + track) os.system("mkdir -p " + src) os.system("mkdir -p " + logs) logfile = "%s/JOB.%s.log" % (logs, NAME) logging.basicConfig(filename=logfile, filemode='a', format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) logging.info("File Path:%s" % os.getcwd()) print "checkJob\n" result = submitjobs.checkJob(NAME, WKEY, logging) print result + "\n" if (result != "START" and FORCE == "no"): sys.exit(0) print 
"checkJob[DONE]\n" print "getJobParams\n" (QUEUE, TIME, MEMORY, CPU) = submitjobs.getJobParams(SERVICENAME, NAME, WKEY, logging) resources = "\'{\\\"queue\\\":\\\"%s\\\",\\\"cputime\\\":\\\"%s\\\",\\\"memory\\\":\\\"%s\\\",\\\"cpu\\\":\\\"%s\\\"}\'" % ( QUEUE, TIME, MEMORY, CPU) logging.info("resources => :%s" % (resources)) print "getJobParams[" + resources + "]\n" if (USERNAME == None): USERNAME = subprocess.check_output("whoami", shell=True).rstrip() print "USER:"******"\n" if (OUTDIR == None): OUTDIR = "~/out" if (QUEUE == None): queue = "-q short" else: queue = "-q " + str(QUEUE) COM.replace('\"{', '\'{') COM.replace('}\"', '}\'') print "COMMAND: [" + COM + "]\n" print "NAME: [" + NAME + "]\n" print "cpu: [" + CPU + "]\n" jobstatus_cmd = python + " %(sdir)s/jobStatus.py -f %(CONFIG)s -u %(USERNAME)s -k %(WKEY)s -s %(SERVICENAME)s -t %(TYPE)s -o %(OUTDIR)s -j %(NAME)s -m %(MESSAGE)s -r %(resources)s" f = open(src + "/" + NAME + ".tmp.bash", 'w') f.write("#!/bin/bash\n") f.write("#BEGINING-OF-FILE\n") f.write("cd " + exec_dir + "\n") MESSAGE = "2" TYPE = "dbSetStartTime" f.write(jobstatus_cmd % locals() + " -n $LSB_JOBID") f.write( "\n retval=$?\n if [ $retval -ne 0 ]; then\n exit 66\n fi\n" ) COMSTR = re.sub(r"'", r"''", COM) f.write("echo '" + str(COMSTR) + "'\n") f.write("\n\n" + str(COM) + "\n\n") f.write( "retval=$?\necho \"[\"$retval\"]\"\nif [ $retval -eq 0 ]; then\n") if (str(NAME) != str(SERVICENAME)): f.write("touch " + success_file + "\n") MESSAGE = "3" TYPE = "dbSetEndTime" f.write(jobstatus_cmd % locals() + " -n $LSB_JOBID") f.write( "\n retval=$?\n if [ $retval -ne 0 ]; then\n exit 66\n fi\n" ) f.write(" echo success\nelse\n echo failed\n") MESSAGE = "0" f.write(jobstatus_cmd % locals() + " -n $LSB_JOBID") f.write( "\n retval=$?\n if [ $retval -ne 0 ]; then\n exit 66\n fi\n" ) f.write(" exit 127\nfi\ndate\n") f.write("#END-OF-FILE\n") f.close() os.system("chmod 755 " + src + "/" + NAME + ".tmp.bash") f = open(src + "/" + NAME + ".submit.bash", 'w') 
f.write(src + "/" + NAME + ".tmp.bash > " + logs + "/$LSB_JOBID.std 2>&1") f.close() os.system("chmod 755 " + src + "/" + NAME + ".submit.bash") f = open(src + "/" + NAME + ".submit.log", 'w') #CHANGE this submition script according to the system. #PUT TRY CATCH HERE command = "bsub " + queue + " -R \"select[os=rh6.4 || os=rh6.5]\" -P dolphin -R \"span[hosts=1]\" -n " + str( CPU ) + " -W " + str(TIME) + " -R \"rusage[mem=" + str( MEMORY ) + "]\" -J " + NAME + " -o " + logs + " < " + src + "/" + NAME + ".submit.bash" print command f.write("SUBMIT SCRIPT[" + command + "]\n\n") output = submitjobs.runcmd(command) f.write("SUBMIT OUT:[" + str(output) + "]\n") words = re.split('[\<\>]+', str(output)) num = words[1] MESSAGE = "1" TYPE = "dbSubmitJob" submitCommand = re.sub(r"'", r"''", submitCommand) jobstatus_cmd = jobstatus_cmd + " -n %(num)s -c '" + submitCommand + "'" command = jobstatus_cmd % locals() f.write("RUN COMMAND:\n" + str(command) + "\n") if num > 0: return submitjobs.runcmd(command) f.close()
def main():
    """Build and launch a local (non-LSF) wrapper script for one pipeline step.

    Writes <src>/<NAME>.tmp.bash instrumented with jobStatus.py start/end
    reports, a .submit.bash wrapper, then runs it in the background with
    subprocess.Popen and records the PID in the database.
    """
    try:
        parser = OptionParser()
        parser.add_option('-u', '--username', help='defined user in the cluster', dest='username')
        parser.add_option('-k', '--key', help='defined key for the workflow', dest='wkey')
        parser.add_option('-s', '--servicename', help='service name', dest='servicename')
        parser.add_option('-c', '--command', help='command that is goinf to be run', dest='com')
        parser.add_option('-n', '--name', help='name of the run', dest='name')
        parser.add_option('-o', '--outdir', help='output directory', dest='outdir')
        parser.add_option('-f', '--config', help='configuration parameter section', dest='config')
        (options, args) = parser.parse_args()
    except:
        print "OptionParser Error:for help use --help"
        sys.exit(2)
    USERNAME = options.username
    WKEY = options.wkey
    OUTDIR = options.outdir
    SERVICENAME = options.servicename
    COM = options.com
    NAME = options.name
    CONFIG = options.config
    python = "python"
    try:
        config = getConfig(CONFIG)
        fun = funcs()
        submitjobs = submitJobs(config['url'], fun)
        submitCommand = fun.getCommand(sys.argv)
        if (USERNAME == None):
            # Fall back to the invoking system user when -u is omitted.
            USERNAME = subprocess.check_output("whoami", shell=True).rstrip()
        # NOTE(review): this print was corrupted/redacted in the source
        # ('print "USER:"******"\n"'); reconstructed as an echo of the
        # resolved username — confirm against version history.
        print "USER:" + str(USERNAME) + "\n"
        sdir = os.path.dirname(sys.argv[0])
        exec_dir = os.path.dirname(os.path.abspath(__file__))
        #print "EXECDIR" + exec_dir
        track = OUTDIR + "/tmp/track"
        src = OUTDIR + "/tmp/src"
        logs = OUTDIR + "/tmp/logs"
        os.system("mkdir -p " + track)
        os.system("mkdir -p " + src)
        os.system("mkdir -p " + logs)
        logfile = "%s/JOB.%s.log" % (logs, NAME)
        logging.basicConfig(filename=logfile, filemode='a',
                            format='%(asctime)s %(message)s',
                            datefmt='%m/%d/%Y %I:%M:%S %p',
                            level=logging.DEBUG)
        logging.info("File Path:%s" % os.getcwd())
        # The .success marker makes completed steps idempotent across reruns.
        success_file = track + "/" + str(NAME) + ".success"
        # %(...)s placeholders are filled from locals() at each write site so
        # TYPE/MESSAGE can vary between the start/end/failure reports.
        jobstatus_cmd = "python %(sdir)s/jobStatus.py -f %(CONFIG)s -u %(USERNAME)s -k %(WKEY)s -s %(SERVICENAME)s -t %(TYPE)s -o %(OUTDIR)s -j %(NAME)s -m %(MESSAGE)s"
        if not os.path.exists(success_file):
            f = open(src + "/" + NAME + ".tmp.bash", 'w')
            f.write("#!/bin/bash\n")
            f.write("#BEGINING-OF-FILE\n")
            # $1 is the shell PID passed by the wrapper ($$) — used as the
            # job number in status reports and the log file name.
            f.write("JOB_NUM=$1\n")
            f.write("sleep 1\n")
            f.write("cd " + exec_dir + "\n")
            COMSTR = re.sub(r"'", r"''", COM)
            f.write("echo '" + str(COMSTR) + "'\n")
            MESSAGE = "2"
            TYPE = "dbSetStartTime"
            f.write(jobstatus_cmd % locals() + " -n $JOB_NUM")
            f.write("\n retval=$?\n if [ $retval -ne 0 ]; then\n exit 66\n fi\n")
            #f.write("\n\nperl " + str(sdir) + "/runParallel.pl --command='"+ str(COM) +"'\n\n")
            f.write("\n\n" + str(COM) + "\n\n")
            f.write("retval=$?\necho \"[\"$retval\"]\"\nif [ $retval -eq 0 ]; then\n")
            if (str(NAME) != str(SERVICENAME)):
                f.write("touch " + success_file + "\n")
            MESSAGE = "3"
            TYPE = "dbSetEndTime"
            f.write(jobstatus_cmd % locals() + " -n $JOB_NUM")
            f.write("\n retval=$?\n if [ $retval -ne 0 ]; then\n exit 66\n fi\n")
            f.write(" echo success\nelse\n echo failed\n")
            MESSAGE = "0"
            f.write(jobstatus_cmd % locals() + " -n $JOB_NUM")
            f.write("\n retval=$?\n if [ $retval -ne 0 ]; then\n exit 66\n fi\n")
            f.write(" exit 127\nfi\ndate\n")
            f.write("#END-OF-FILE\n")
            f.close()
            os.system("chmod 755 " + src + "/" + NAME + ".tmp.bash")
            f = open(src + "/" + NAME + ".submit.bash", 'w')
            f.write(src + "/" + NAME + ".tmp.bash $1> " + logs + "/$1.std 2>&1")
            f.close()
            os.system("chmod 755 " + src + "/" + NAME + ".submit.bash")
            # NOTE(review): the source is collapsed onto one line; the launch
            # below is assumed to sit inside this `if not exists(success_file)`
            # guard (matching the LSF variant) — confirm against history.
            command = src + "/" + NAME + ".submit.bash $$"
            print "\n\n\nCOM:" + command + "\n\n\n"
            pid = subprocess.Popen(command, shell=True).pid
            print "\n\n\nPID:" + str(pid) + "\n\n\n"
            MESSAGE = "1"
            TYPE = "dbSubmitJob"
            submitCommand = re.sub(r"'", r"''", submitCommand)
            jobstatus_cmd = jobstatus_cmd + " -n %(pid)s -c 'python " + submitCommand + "'"
            command = jobstatus_cmd % locals()
            print command
            #PUT TRY CATCH HERE
            if pid > 0:
                return submitjobs.runcmd(command, logging)
    except Exception, ex:
        fun.stop_err('Error (line:%s)running runJobs.py\n%s' % (format(sys.exc_info()[-1].tb_lineno), str(ex)))
def main():
    """Record a job lifecycle event (submit / start / end) in the database.

    Dispatches on --type: dbSubmitJob inserts the job row, dbSetStartTime and
    dbSetEndTime update the corresponding timestamp; on end it also captures
    the job's output and checks whether all jobs of the service are finished.
    """
    try:
        #python finishJob.py -u kucukura -k nVy1THnthvrRWfXcj187KeDDQrNAkY -s splitFastQ
        parser = OptionParser()
        parser.add_option('-u', '--username', help='defined user in the cluster', dest='username')
        parser.add_option('-k', '--key', help='defined key for the workflow', dest='wkey')
        parser.add_option('-c', '--com', help='bash script of the command', dest='com')
        parser.add_option('-j', '--jobname', help='name of of the job', dest='jobname')
        parser.add_option('-s', '--servicename', help='service name', dest='servicename')
        parser.add_option('-t', '--type', help='type of the operation', dest='type')
        parser.add_option('-n', '--jobnum', help='submitted job number', dest='jobnum')
        parser.add_option('-m', '--message', help='resulting message of the job', dest='message')
        parser.add_option('-o', '--outdir', help='output directory', dest='outdir')
        parser.add_option('-f', '--config', help='config parameters section', dest='config')
        parser.add_option('-r', '--resources', help='used resources in the job submission', dest='resources')
        (options, args) = parser.parse_args()
    except:
        #parser.print_help()
        print "for help use --help"
        sys.exit(2)
    USERNAME = options.username
    WKEY = options.wkey
    COM = options.com
    JOBNAME = options.jobname
    SERVICENAME = options.servicename
    TYPE = options.type
    JOBNUM = options.jobnum
    MESSAGE = options.message
    OUTDIR = options.outdir
    CONFIG = options.config
    RESOURCES = options.resources
    f = funcs()
    config = getConfig(CONFIG)
    jobStat = jobStatus(config['url'], f)
    edir = os.path.dirname(sys.argv[0])
    # One log file per job number under the run's tmp/logs directory.
    logfile = "%s/tmp/logs/JOBSTATUS.%s.log" % (OUTDIR, str(JOBNUM))
    logging.basicConfig(filename=logfile, filemode='a',
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.DEBUG)
    logging.info("File Path:%s" % os.getcwd())
    logging.info("WKEY:" + str(WKEY))
    logging.info("TYPE:" + str(TYPE))
    if (TYPE == "dbSubmitJob"):
        res = jobStat.insertJob(USERNAME, WKEY, COM, JOBNAME, SERVICENAME,
                                JOBNUM, RESOURCES, MESSAGE, logging)
        logging.info("\n\nInsert Job => SUBMIT JOB:\nres:" + str(res) + " wkey:" + str(WKEY) + " servicename:" + str(SERVICENAME) + " jobnum:" + str(JOBNUM) + "\n\n")
    elif (TYPE == "dbSetStartTime"):
        res = jobStat.updateJob(USERNAME, WKEY, JOBNAME, SERVICENAME,
                                "start_time", JOBNUM, MESSAGE, logging)
        logging.info("\n\nUpdate Job => START JOB:\nres:" + str(res) + " wkey:" + str(WKEY) + " servicename:" + str(SERVICENAME) + " jobnum:" + str(JOBNUM) + "\n\n")
    elif (TYPE == "dbSetEndTime"):
        res = jobStat.updateJob(USERNAME, WKEY, JOBNAME, SERVICENAME,
                                "end_time", JOBNUM, MESSAGE, logging)
        logging.info("\n\nUpdate Job => END JOB:\nres:" + str(res) + " wkey:" + str(WKEY) + " servicename:" + str(SERVICENAME) + " jobnum:" + str(JOBNUM) + "\n\n")
        # NOTE(review): the source is collapsed onto one line; the two calls
        # below are assumed to belong inside the dbSetEndTime branch (capture
        # output and check completion only when a job ends) — confirm.
        resJobOut = jobStat.insertJobOut(USERNAME, WKEY, JOBNUM, OUTDIR, edir, logging)
        resALL = jobStat.checkAllJobsFinished(USERNAME, WKEY, SERVICENAME, logging)
        logging.info("\n\nCheck if All JOBS finished\nres:" + str(resALL) + " wkey:" + str(WKEY) + " servicename:" + str(SERVICENAME))
def main(): f = funcs() params_section = "" #define options parser = OptionParser() parser.add_option("-r", "--rungroupid", dest="rpid") parser.add_option("-b", "--backup", dest="backup") parser.add_option("-w", "--wkey", dest="wkey") parser.add_option("-c", "--config", dest="config") # parse options, args = parser.parse_args() # retrieve options rpid = options.rpid BACKUP = options.backup WKEY = options.wkey params_section = options.config boto = botoSubmit(f, params_section) boto.config.read("../config/config.ini") url = boto.config.get(params_section, "api_path") + '/api/service.php' boto.setURL(url) amazon = boto.getAmazonCredentials('kucukura') samplelist = boto.getSampleList(sys.argv[3], 'none') processedLibs = [] amazon_bucket = "" for sample in samplelist: print "\n" sample_id = sample['sample_id'] file_id = sample['file_id'] libname = sample['samplename'] filename = sample['file_name'] backup_dir = sample['backup_dir'] amazon_bucket = sample['amazon_bucket'] PAIRED = None if (filename.find(',') != -1): PAIRED = "Yes" boto.processFastqFiles(sample, PAIRED) processedLibs.append([libname, sample_id]) amazon_bucket = str(re.sub('s3://', '', amazon_bucket)) print libname + ":" + str(sample_id) if (boto.checkReadCounts(sample_id, 'ngs_fastq_files') and amazon): if (filename.find(',') != -1): files = filename.split(',') boto.uploadFile(amazon, amazon_bucket, backup_dir, libname + '.1.fastq.gz') boto.uploadFile(amazon, amazon_bucket, backup_dir, libname + '.2.fastq.gz') else: boto.uploadFile(amazon, amazon_bucket, backup_dir, libname + '.fastq.gz') boto.runSQL( "UPDATE ngs_fastq_files SET backup_checksum = NULL, aws_status = 0 WHERE sample_id in (" + sample_id + ")") else: print "ERROR 86: The # of read counts doesn't match: %s", libname sys.exit(86) sys.exit(0)
def main(): try: parser = OptionParser() parser.add_option('-i', '--inputparam', help='input parameters for the workflow', dest='inputparam') parser.add_option( '-p', '--defaultparam', help='defined parameter file that will be run on cluster', dest='defaultparam') parser.add_option('-u', '--username', help='defined user in the cluster', dest='username') parser.add_option('-k', '--wkey', help='defined key for the workflow', dest='wkey') parser.add_option('-w', '--workflowfile', help='workflow filename', dest='workflowfile') parser.add_option('-d', '--dbhost', help='dbhost name', dest='dbhost') parser.add_option('-o', '--outdir', help='output directory in the cluster', dest='outdir') parser.add_option('-f', '--config', help='configuration parameter section', dest='config') parser.add_option('-r', '--runid', help='runid', dest='runid') (options, args) = parser.parse_args() except: print "OptionParser Error:for help use --help" sys.exit(2) INPUTPARAM = options.inputparam DEFAULTPARAM = options.defaultparam USERNAME = options.username WKEY = options.wkey WORKFLOWFILE = options.workflowfile DBHOST = options.dbhost OUTDIR = options.outdir CONFIG = options.config RUNID = options.runid f = funcs() config = getConfig(CONFIG) workflow = runWorkflow(config['url'], f) LOGPATH = config['logpath'] #This section is just for username conversion in the cluster can be removed in the future if (CONFIG != "Docker" and CONFIG != "Travis" and CONFIG != "Amazon"): com = "grep " + USERNAME + " /project/umw_biocore/svcgalaxy/conv.file|awk '{print $2}'" USERNAME = str(os.popen(com).readline().rstrip()) ######## if (USERNAME and len(USERNAME) < 3): print "Error:Username doesn't exist" sys.exit(2) if (OUTDIR == None): OUTDIR = "~/out" if (OUTDIR.find("/") == -1): OUTDIR = "~/" + OUTDIR if (INPUTPARAM != None): if path.isfile(INPUTPARAM) and access(INPUTPARAM, R_OK): INPUTPARAM = workflow.import_param(INPUTPARAM) else: INPUTPARAM = re.sub(" ", "", INPUTPARAM) 
logging.basicConfig(filename=LOGPATH + '/run' + str(RUNID) + '.log', filemode='a', format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) logging.info(USERNAME + ":" + OUTDIR) logging.info(INPUTPARAM) if (WKEY == None): WKEY = "start" else: workflow.updateRunParams(WKEY, RUNID, logging) services = workflow.import_workflow(WORKFLOWFILE, logging) slen = str(len(services)) wfbase = os.path.splitext(basename(WORKFLOWFILE))[0] wfname = wfbase.split('.')[0] wkey = workflow.startWorkflow(INPUTPARAM, DEFAULTPARAM, USERNAME, wfname, WKEY, OUTDIR, slen, logging) if (wkey.startswith("ERROR")): logging.warning("ERROR:" + wkey) print "Check the parameter files:\n" sys.exit(2) print "WORKFLOW STARTED:" + wkey + "\n" logging.info('WORKFLOW STARTED' + wkey + "\n") workflow.updateRunParams(wkey, RUNID, logging) for service in services: br = 1 checkcount = 0 while (br == 1): ret = workflow.startService(service, wkey, USERNAME, logging) print ret + "\n" time.sleep(5) if (ret.startswith("RUNNING") and float(service.waittime) > 0): time.sleep(float(service.waittime)) elif (ret.startswith("ERROR")): print service.servicename + ":" + ret + "\n" logging.warning("ERROR:" + ret) logging.warning("ERROR:" + service.command) print "Check the command:\n" print service.command + "\n" sys.exit(2) elif (ret.startswith("DONE")): checkcount = 0 br = 0 checkcount = checkcount + 1 br = 1 print "All the services Ended" while (br == 1): res = workflow.endWorkflow(wkey, logging) #print ret + "\n" if (ret.startswith("WRUNNING")): time.sleep(5) else: br = 0
def main():
    """Download SRA/GEO files for every sample of a run and submit one
    download+cleanup job per SRA file.

    Exits 2 when option parsing fails or required options are missing.
    """
    try:
        parser = OptionParser()
        parser.add_option('-b', '--barcode', help='barcode', dest='barcode')
        parser.add_option('-j', '--jobsubmit', help='jobsubmit', dest='jobsubmit')
        parser.add_option('-r', '--runparamsid', help='group id', dest='runparamsid')
        parser.add_option('-u', '--username', help='username', dest='username')
        parser.add_option('-f', '--fastqdump', help='fastqdump', dest='fastqdump')
        parser.add_option('-o', '--outdir', help='output directory', dest='outdir')
        parser.add_option('-c', '--config', help='config parameters section', dest='config')
        (options, args) = parser.parse_args()
    except:
        parser.print_help()
        print "for help use --help"
        sys.exit(2)
    BARCODE = options.barcode
    USERNAME = options.username
    FASTQDUMP = options.fastqdump
    RUNPARAMSID = options.runparamsid
    JOBSUBMIT = options.jobsubmit
    OUTDIR = options.outdir
    CONFIG = options.config
    f = funcs()
    config = getConfig(CONFIG)
    download = stepDownload(config['url'], f)
    if (OUTDIR == None or JOBSUBMIT == None):
        print "for help use --help"
        sys.exit(2)
    print BARCODE
    print JOBSUBMIT
    print OUTDIR
    print USERNAME
    filelist = download.getFileList(RUNPARAMSID, BARCODE)
    sralist = download.getGSMs(RUNPARAMSID)
    print filelist
    for sample in filelist:
        libname = sample['samplename']
        sample_id = sample['sample_id']
        filename = sample['file_name']
        fastq_dir = sample['fastq_dir']
        dir_id = sample['dir_id']
        backup_dir = sample['backup_dir']
        # Match each sample to its GSM/SRA record by id.
        for gsm in sralist:
            if sample_id == gsm['id']:
                sra_file = gsm['title']
                # NOTE(review): the source is collapsed onto one line; the
                # statements below are assumed to sit inside this id-match
                # branch (they use sra_file) — confirm against history.
                print libname
                print filename
                print fastq_dir
                print backup_dir
                # Build the download (parseGEO) + cleanup (cleanGEO) command
                # and submit it as one job keyed by the SRA accession.
                com = download.parseGEO(libname, sra_file, OUTDIR,
                                        RUNPARAMSID, FASTQDUMP, JOBSUBMIT)
                com = com + download.cleanGEO(sample_id, sra_file, libname,
                                              fastq_dir, OUTDIR, RUNPARAMSID)
                download.submitJob(JOBSUBMIT, sra_file.split(".")[0], com)
    sys.exit(0)