def main(): global code, fasta_in, results_dir, nohoms, cleanup, frag_config # Create a parser, add options, parse options, and parse leftover required positional args. parser = OptionParser(usage=usage_str) set_options(parser) (options, args) = parser.parse_args() (code, fasta_in, results_dir) = parse_positional_args(args, parser) # Set config file, if specified. if options.config_file: frag_config = options.config_file # Set remaining globals nohoms = options.nohoms cleanup = options.cleanup # Create a processing pool (via hpf.processing) and make tasks to serve to the frag_driver. print "main:: Creating processor pool." pool = processor() print "main:: Serializing tasks based on input string: {0}.".format( fasta_in) pool.make_tasks(tasks, fasta_in) print "main:: Running tasks." consume(pool.run(frag_driver))
def main():
    """Run BLAST over every pre-split piece of the HPD fasta set."""
    # Glob pattern covering the split query files on scratch.
    query_glob = '/scratch/jamboree/fasta_split/hpd.fasta.*'
    pool = processor()
    pool.make_tasks(tasks, query_glob)
    consume(pool.run(run_blast))
def main(*args):
    """Prepare MCM tasks and export the successful ones as a pickle.

    Runs the ``prep`` function over the processing pool, drops failed
    (None) results, and dumps the surviving task list to ``exported.pickle``.
    """
    runtime().set_debug(1)
    # raise_errors=False lets individual task failures come back in the
    # result stream instead of aborting the whole run.
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS), raise_errors=False)
    runtime().debug("Using processor", pool)
    pool.make_tasks(tasks)
    # Keep only tasks that prep produced a real result for.
    # (Fixed: identity test "is not None" instead of "!= None".)
    mcm_tasks = [t for t in pool.run(prep) if t is not None]
    import cPickle
    # Fixed: pickles are binary data; the file must be opened "wb", not "w"
    # (text mode corrupts pickle streams on platforms with newline translation).
    with open("exported.pickle", "wb") as handle:
        cPickle.dump(mcm_tasks, handle)
def main(*args):
    """Skeleton driver: build a processing pool and run it with no task function.

    ``runtime()`` creates a new Runtime object or returns the existing one,
    and ``.opt(SYNCHRONOUS)`` reads its synchronous option.  ``processor``
    returns a Map, SGEArray, or PBSArrayProcessor object (the last of these
    is not fully implemented).
    """
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)
    # Placeholder wiring: no task generator and no task function yet.
    pool.make_tasks(None)
    consume(pool.run(None))
def main():
    """Demo driver: process a hard-coded directory of fasta files."""
    # Simulated input: glob over the local fragment-input test directory.
    fasta_glob = "/Users/dpb/bonneau-dev/sandbox/frag-input/oi_test/*.fasta"
    # Build the processing pool via hpf.processing.
    pool = processor()
    # Extra arguments to make_tasks are forwarded directly to the task function.
    pool.make_tasks(tasks, fasta_glob)
    consume(pool.run(process))
def main(*args):
    """Split each input file, fanning the work out over the processing pool."""
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)
    # Each task pairs one input with the shared MAX_SIZE / NEXUS / DIRECTORY
    # options, all read lazily when the task list is generated.
    pool.make_tasks(lambda: zip(
        args,
        repeat(runtime().opt(MAX_SIZE), len(args)),
        repeat(runtime().opt(NEXUS), len(args)),
        repeat(runtime().opt(DIRECTORY), len(args))))
    consume(pool.run(_split))
def main(*args):
    """Run the _split task over every input given on the command line."""
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)

    def _task_list():
        # One task per input: (input, MAX_SIZE, NEXUS, DIRECTORY).
        count = len(args)
        return zip(args,
                   repeat(runtime().opt(MAX_SIZE), count),
                   repeat(runtime().opt(NEXUS), count),
                   repeat(runtime().opt(DIRECTORY), count))

    pool.make_tasks(_task_list)
    consume(pool.run(_split))
def main(): # TODO: make a pfam flag commandline argument # TODO: make output dir a commandline argument # TODO: make experiment a commandline argument experiment_id = 1171 print "Interpro driver:: Creating processor pool, no SYNCHRONOUS option" pool = processor() pool.make_tasks(tasks, experiment_id) print "Running tasks..." #consume(pool.run(interpro_driver)) consume(pool.run(pfam_driver))
def main(): global code, fasta_in, results_dir, nohoms, cleanup, frag_config # Create a parser, add options, parse options, and parse leftover required positional args. parser = OptionParser(usage=usage_str) set_options(parser) (options, args) = parser.parse_args() (code, fasta_in, results_dir) = parse_positional_args(args, parser) # Set config file, if specified. if options.config_file: frag_config = options.config_file # Set remaining globals nohoms = options.nohoms cleanup = options.cleanup # Create a processing pool (via hpf.processing) and make tasks to serve to the frag_driver. print "main:: Creating processor pool." pool = processor() print "main:: Serializing tasks based on input string: {0}.".format(fasta_in) pool.make_tasks(tasks, fasta_in) print "main:: Running tasks." consume(pool.run(frag_driver))
def main(*args):
    """Run the _blast task over every positional argument."""
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)
    # The task list is simply the command-line arguments themselves.
    pool.make_tasks(lambda: args)
    consume(pool.run(_blast))
def main(*args):
    """Blast each input argument via the processing pool."""
    sync = runtime().opt(SYNCHRONOUS)
    pool = processor(synchronous=sync)
    runtime().debug("Using processor", pool)
    # One task per command-line argument.
    pool.make_tasks(lambda: args)
    consume(pool.run(_blast))
def main(*args):
    """Re-run MCM over the pool's default task set."""
    sync = runtime().opt(SYNCHRONOUS)
    pool = processor(synchronous=sync)
    runtime().debug("Using processor", pool)
    # No explicit task generator; the pool supplies its defaults.
    pool.make_tasks(None)
    consume(pool.run(remcm))
def main(*args):
    """Load each family from the arguments, drop failures, and plot the rest."""
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)
    pool.make_tasks(lambda: args)
    # Failed tasks come back as Exception instances; keep only real results.
    loaded = [result for result in pool.run(family)
              if not isinstance(result, Exception)]
    plot(loaded)
def main(*args):
    """Import each family named on the command line."""
    # NOTE(original): db mappings are cleared so each subprocess gets its own
    # engine — presumably handled inside _import_family; confirm there.
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)
    # Tasks are simply the positional arguments.
    pool.make_tasks(lambda: args)
    consume(pool.run(_import_family))
def main(): print "Struct all-v-all main" pool = processor() pool.make_tasks(tasks, query_struct_pkl, num_pieces) consume(pool.run(do_allvall))
def main(*args):
    """Split each fasta argument into the configured number of parts."""
    pool = processor(synchronous=runtime().opt(SYNCHRONOUS))
    runtime().debug("Using processor", pool)
    # Each task is a (fasta, PARTS) pair; PARTS is read when tasks are built.
    pool.make_tasks(lambda: [(f, runtime().opt(PARTS)) for f in args])
    # Unpack the pair into split(fasta, parts).
    consume(pool.run(lambda task: split(*task)))
def main(*args):
    """Run the MCM task over all command-line inputs."""
    pool = processor()
    runtime().debug("Using processor", pool)
    # The positional arguments are forwarded directly to the task builder.
    pool.make_tasks(tasks, *args)
    consume(pool.run(_mcm))
def main(*args):
    """Import families given as arguments, one task per argument."""
    # NOTE(original): intent is one db engine per subprocess (mappings
    # cleared) — presumably done inside _import_family; verify there.
    sync = runtime().opt(SYNCHRONOUS)
    pool = processor(synchronous=sync)
    runtime().debug("Using processor", pool)
    pool.make_tasks(lambda: args)
    consume(pool.run(_import_family))
def main():
    """Build the default task set and run the clustering driver."""
    # TODO: Create a parser for cmdline options
    # Processing pool comes from hpf.processing; tasks use its defaults.
    pool = processor()
    pool.make_tasks(tasks)
    consume(pool.run(cluster_driver))
def main(*args):
    """Run the publications task, uploading each result as it completes."""
    # runtime() returns the shared Runtime object (created on first use).
    rt = runtime()
    pool = processor(synchronous=rt.opt(SYNCHRONOUS),
                     processors=rt.opt(PROCESSORS))
    rt.debug("Using processor", pool)
    pool.make_tasks(tasks)
    # result=upload pushes each finished task through the upload handler.
    consume(pool.run(_publications, result=upload))
def main():
    """All-vs-all driver reading its inputs from module globals."""
    global query_struct_pkl, num_pieces
    # Pool and tasks are built from the module-level pickle path and piece count.
    pool = processor()
    pool.make_tasks(tasks, query_struct_pkl, num_pieces)
    consume(pool.run(do_allvall))