Example #1
 # Set up and run pipeline
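 # (the email address is bound into the group handler via a lambda
 # default argument, so its value is fixed when the handler is defined)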
 pipeline = Pipeline.PipelineRunner(runner,
                                    max_concurrent_jobs=options.max_concurrent_jobs,
                                    jobCompletionHandler=JobCleanup,
                                    groupCompletionHandler=lambda group,jobs,email=options.email_addr:
                                    SendReport(email,group,jobs))
 for data_dir in data_dirs:
     # Collect the data files for this directory
     print "Collecting data from %s" % data_dir
     if options.input_type == "solid":
         run_data = Pipeline.GetSolidDataFiles(data_dir,pattern=options.pattern)
     elif options.input_type == "solid_paired_end":
         run_data = Pipeline.GetSolidPairedEndFiles(data_dir,pattern=options.pattern)
     elif options.input_type == "fastq":
         run_data = Pipeline.GetFastqFiles(data_dir,pattern=options.pattern)
     elif options.input_type == "fastqgz":
         run_data = Pipeline.GetFastqGzFiles(data_dir,pattern=options.pattern)
     else:
         logging.error("Unknown input type: '%s'" % options.input_type)
         sys.exit(1)
     # Add jobs to pipeline runner (up to limit of max_total_jobs)
     for data in run_data:
         if options.max_total_jobs > 0 and pipeline.nWaiting() == options.max_total_jobs:
             print "Maximum number of jobs queued (%d)" % options.max_total_jobs
             break
         label = os.path.splitext(os.path.basename(data[0]))[0]
         group = os.path.basename(data_dir)
         # Set up argument list for script
         args = []
         if script_args:
             args.extend(script_args)
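
The `groupCompletionHandler` set up at the top of this example relies on a standard Python idiom: giving the lambda a default argument (`email=options.email_addr`) captures the value of the email address at the moment the handler is defined, not when the pipeline later calls it. A minimal, self-contained sketch of that idiom is shown below; the names `addr` and `report` and the example values are purely illustrative and not part of the original script.

 # Default arguments are evaluated once, when the lambda is defined,
 # so 'email' keeps the value 'addr' had at that point.
 addr = "admin@example.org"
 report = (lambda group, jobs, email=addr:
           "Report for %s (%d jobs) sent to %s" % (group, len(jobs), email))

 addr = "someone.else@example.org"   # reassigning later has no effect on the handler

 print report("run_1", ["job_1", "job_2"])
 # -> Report for run_1 (2 jobs) sent to admin@example.org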