if args.verbose: print "BlockTLScript:", args if not args.do_sigproc and not args.do_basecalling and not args.do_alignment: parser.print_help() sys.exit(1) #ensure we permit read/write for owner and group output files. os.umask(0002) blockprocessing.printheader() env, warn = explogparser.getparameter() print warn blockprocessing.write_version() sys.stdout.flush() sys.stderr.flush() #------------------------------------------------------------- # Connect to Job Server #------------------------------------------------------------- try: jobserver = xmlrpclib.ServerProxy( "http://%s:%d" % (cluster_settings.JOBSERVER_HOST, cluster_settings.JOBSERVER_PORT), verbose=False, allow_none=True) primary_key_file = os.path.join(os.getcwd(), 'primary.key') except: traceback.print_exc()
# NOTE(review): this chunk is whitespace-mangled (many statements collapsed
# onto one physical line) and is NOT valid Python as written; left byte-for-byte
# intact because its first token is the dangling tail of a
# `xmlrpclib.ServerProxy("http://%s:%d" % ...)` call whose opening lies in a
# preceding chunk outside this view, and its last statement sits inside a
# `for bcname, barcode_file_dict in barcode_files.iteritems():` loop whose body
# continues past this view — reformatting would break both unseen halves.
# Logical content, in order: finish the ServerProxy construction guarded by
# `except (socket.error, xmlrpclib.Fault)`; printheader(); build the
# primary.key path from the CWD; best-effort `jobserver.updatestatus(...,
# 'Started', True)` (bare except — presumably deliberate best-effort, but
# it also swallows SystemExit/KeyboardInterrupt; TODO confirm);
# write_version(); then find_barcodes_to_process(parentBAMs, barcodeSet) and,
# when a barcode set exists, begin launching one merge job per barcode
# (Python 2 `.iteritems()`).
(JOBSERVER_HOST, JOBSERVER_PORT), verbose=False, allow_none=True) except (socket.error, xmlrpclib.Fault): traceback.print_exc() printheader() primary_key_file = os.path.join(os.getcwd(), 'primary.key') try: jobserver.updatestatus(primary_key_file, 'Started', True) except: traceback.print_exc() # Software version write_version() # *** Barcodes *** barcodeSet, barcode_files, barcodeSet_Info = find_barcodes_to_process( parentBAMs, barcodeSet) if barcodeSet: bc_jobs = [] #zipname = '_'+ env['resultsName']+ '.barcode.bam.zip' #zip_args = ['--zip', zipname] stats_args = ['--align-stats'] # launch merge jobs, one per barcode for bcname, barcode_file_dict in barcode_files.iteritems(): filename = barcode_file_dict['filename']
args = parser.parse_args() if args.verbose: print "MergeTLScript:",args if not args.do_sigproc and not args.do_basecalling and not args.do_zipping: parser.print_help() sys.exit(1) #ensure we permit read/write for owner and group output files. os.umask(0002) blockprocessing.printheader() env,warn = explogparser.getparameter() blockprocessing.write_version() sys.stdout.flush() sys.stderr.flush() #------------------------------------------------------------- # Connect to Job Server #------------------------------------------------------------- try: jobserver = xmlrpclib.ServerProxy("http://%s:%d" % (cluster_settings.JOBSERVER_HOST, cluster_settings.JOBSERVER_PORT), verbose=False, allow_none=True) primary_key_file = os.path.join(os.getcwd(),'primary.key') except: traceback.print_exc()
# NOTE(review): this chunk is whitespace-mangled (many statements collapsed
# onto one physical line) and is NOT valid Python as written; left byte-for-byte
# intact because its final statements (`filename = ...`, `jobId = ""`) sit
# inside a `for bcname, barcode_file_dict in barcode_files.iteritems():` loop
# whose body continues past this view — reformatting would break the unseen
# continuation's expected indentation.
# Logical content, in order: construct the job-server XML-RPC proxy guarded by
# `except (socket.error, xmlrpclib.Fault)`; printheader(); build the
# primary.key path from the CWD; best-effort `jobserver.updatestatus(...,
# 'Started', True)` (bare except — presumably deliberate best-effort, but
# it also swallows SystemExit/KeyboardInterrupt; TODO confirm);
# write_version(); then find_barcodes_to_process(parentBAMs, barcodeSet) and,
# when a barcode set exists, begin launching one merge job per barcode
# (Python 2 `.iteritems()`), initializing `jobId` per iteration.
try: jobserver = xmlrpclib.ServerProxy("http://%s:%d" % (JOBSERVER_HOST, JOBSERVER_PORT), verbose=False, allow_none=True) except (socket.error, xmlrpclib.Fault): traceback.print_exc() printheader() primary_key_file = os.path.join(os.getcwd(),'primary.key') try: jobserver.updatestatus(primary_key_file,'Started',True) except: traceback.print_exc() # Software version write_version() # *** Barcodes *** barcodeSet, barcode_files, barcodeSet_Info = find_barcodes_to_process(parentBAMs, barcodeSet) if barcodeSet: bc_jobs = [] #zipname = '_'+ env['resultsName']+ '.barcode.bam.zip' #zip_args = ['--zip', zipname] stats_args = ['--align-stats'] # launch merge jobs, one per barcode for bcname,barcode_file_dict in barcode_files.iteritems(): filename = barcode_file_dict['filename'] jobId = ""