# Parse command-line options; -l/--log_dir overrides the default log location.
option_parser.add_option('-l', '--log_dir')
(options, args) = option_parser.parse_args()

# QIIME passes exactly two positional arguments: the jobs-list file and a job id.
if len(args) == 2:
    jobs_list_file = args[0]
    job_id = args[1]
else:
    # BUG FIX: the original only raised when len(args) == 0, so a call with
    # 1 or 3+ arguments silently left jobs_list_file undefined and crashed
    # later with a NameError. Reject every wrong argument count here.
    # (Call form of raise is valid in both Python 2 and 3.)
    raise ValueError("Didn't get the right command line arguments")

# Make a directory for holding LSF log files (default: ~/qiime_parallel_logs).
if options.log_dir is None:
    log_dir = os.path.join(os.environ['HOME'], 'qiime_parallel_logs')
else:
    log_dir = options.log_dir
if not os.path.exists(log_dir):
    os.mkdir(log_dir, 0o755)  # 0o755: octal literal valid on Python 2.6+ and 3.x

# Submit one LSF job per line of the jobs-list file, collecting the returned
# job ids and per-job log paths.
job_ids = []
logs = []
with open(jobs_list_file, 'r') as jobs_handle:  # 'with' guarantees the handle closes
    for (i, line) in enumerate(jobs_handle):
        datetimestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        log = os.path.join(log_dir, 'job_%i_%s.log' % (i, datetimestamp))
        job_id = lsf.submit_to_LSF(options.queue, log, line.strip())
        job_ids.append(job_id)
        logs.append(log)
# Register the log-directory option and parse the command line.
option_parser.add_option('-l', '--log_dir')
options, args = option_parser.parse_args()

# Check that we got the qiime-required arguments: jobs-list file and job id.
if len(args) != 2:
    # BUG FIX: previously only len(args) == 0 raised; counts of 1 or 3+
    # fell through with jobs_list_file unset and failed later with a
    # NameError. Using a single != 2 guard covers all bad counts.
    # (raise-as-call works on Python 2 and 3.)
    raise ValueError("Didn't get the right command line arguments")
jobs_list_file, job_id = args

# Resolve the directory for LSF log files, creating it on first use.
log_dir = options.log_dir
if log_dir is None:
    log_dir = os.path.join(os.environ['HOME'], 'qiime_parallel_logs')
if not os.path.exists(log_dir):
    os.mkdir(log_dir, 0o755)  # 0o755 octal literal: valid Python 2.6+/3.x

# Submit each non-blank command line to LSF, recording job ids and log paths.
job_ids = []
logs = []
with open(jobs_list_file, 'r') as jobs_handle:  # closes the file even on error
    for i, line in enumerate(jobs_handle):
        datetimestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        log = os.path.join(log_dir, 'job_%i_%s.log' % (i, datetimestamp))
        job_ids.append(lsf.submit_to_LSF(options.queue, log, line.strip()))
        logs.append(log)
cmd += ' | align_vdj.py' + locus_options  # 6. VDJ CLASSIFICATION
cmd += ' | translate_chains.py'           # 7. TRANSLATION

# Submit the assembled pipeline to LSF once per input part.
log("Submitting jobs to LSF...")
jobIDs = []
logfiles = []
outnames = []
for part in parts:
    partID = part.split('.')[-1]  # parts are named with a numeric suffix
    partoutname = join(work_dir, 'parts/aligned.imgt.' + partID)
    outnames.append(partoutname)
    # BUG FIX: the original interpolated with ('cat %s | ' + cmd + ' > %s') %
    # (...), which runs %-formatting over the user-assembled cmd string; any
    # '%' inside cmd (e.g. from locus_options) would raise or corrupt the
    # command. Plain concatenation is immune to that.
    curr_cmd = 'cat ' + part + ' | ' + cmd + ' > ' + partoutname
    logfile = join(work_dir, 'logs/alignment.log.' + partID)
    jobID = lsf.submit_to_LSF(short_queue, logfile, curr_cmd)
    logfiles.append(logfile)
    jobIDs.append(jobID)
log("finished\n")

# Block until every submitted alignment job has completed.
log("Waiting for LSF jobs...")
lsf.wait_for_LSF_jobs(jobIDs, logfiles)
log("finished\n")

# 8. CONCAT PARTS: merge all aligned pieces into one output file.
log("Concatenating pieces...")
aligned_file = join(work_dir, basename + '.aligned.imgt')
# shell=True is required here: the command is a find | xargs pipeline with
# output redirection. All path components are program-generated, not
# untrusted user input.
cmd = 'find %s -name "aligned.imgt.*" | xargs cat > %s' % (
    join(work_dir, 'parts'), aligned_file)
p = subprocess.Popen(cmd, shell=True)
p.wait()
cmd += ' | align_vdj.py' + locus_options  # 6. VDJ CLASSIFICATION
cmd += ' | translate_chains.py'           # 7. TRANSLATION

# Fan the pipeline out across the input parts via LSF.
log("Submitting jobs to LSF...")
jobIDs = []
logfiles = []
outnames = []
for part in parts:
    partID = part.split('.')[-1]  # numeric suffix identifies the part
    partoutname = join(work_dir, 'parts/aligned.imgt.' + partID)
    outnames.append(partoutname)
    # BUG FIX: building the per-part command as ('cat %s | ' + cmd + ' > %s')
    # % (...) applies %-interpolation to cmd itself, so a literal '%' inside
    # cmd (e.g. via locus_options) would break formatting. Concatenate
    # instead.
    curr_cmd = 'cat ' + part + ' | ' + cmd + ' > ' + partoutname
    logfile = join(work_dir, 'logs/alignment.log.' + partID)
    jobID = lsf.submit_to_LSF(short_queue, logfile, curr_cmd)
    logfiles.append(logfile)
    jobIDs.append(jobID)
log("finished\n")

# Wait for every alignment job before touching the outputs.
log("Waiting for LSF jobs...")
lsf.wait_for_LSF_jobs(jobIDs, logfiles)
log("finished\n")

# 8. CONCAT PARTS: stitch the per-part outputs into a single file.
log("Concatenating pieces...")
aligned_file = join(work_dir, basename + '.aligned.imgt')
# shell=True is needed for the find | xargs pipeline and the redirection;
# paths are program-generated, not untrusted input.
cmd = 'find %s -name "aligned.imgt.*" | xargs cat > %s' % (
    join(work_dir, 'parts'), aligned_file)
p = subprocess.Popen(cmd, shell=True)
# BUG FIX: the original never waited on this Popen, so execution could
# continue (or the script could exit) before concatenation finished,
# leaving aligned_file truncated or missing.
p.wait()