def pipeline(task, wkname, conf=None):
    """Process a finished task and return the next batch of NPR tasks.

    When called without a task, bootstrap the thread with an initial Msf
    task covering all target sequences; otherwise, load the thread's
    config and let process_task() decide what comes next.
    """
    logindent(2)
    if not task:  # in this case, conf is expected
        source_seqtype = "aa" if "aa" in GLOBALS["seqtypes"] else "nt"
        all_seqs = GLOBALS["target_sequences"]
        initial_task = Msf(set(all_seqs), set(), seqtype=source_seqtype)

        initial_task.main_tree = None
        initial_task.threadid = generate_runid()
        initial_task.configid = initial_task.threadid
        initial_task.target_wkname = wkname
        # Register node
        db.add_node(initial_task.threadid, initial_task.nodeid,
                    initial_task.cladeid, initial_task.target_seqs,
                    initial_task.out_seqs)

        new_tasks = [initial_task]
    else:
        conf = GLOBALS[task.configid]
        npr_conf = IterConfig(conf, wkname, task.size, task.seqtype)
        new_tasks = process_task(task, wkname, npr_conf, conf["_nodeinfo"])

    process_new_tasks(task, new_tasks, conf)
    logindent(-2)
    return new_tasks
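# --- Illustrative driver sketch (not part of nprlib): one way a scheduler
# might seed and iterate a thread through pipeline(). The "genetree" workflow
# name, the execute_jobs() helper and the assumption that the main program has
# already populated GLOBALS["seqtypes"] and GLOBALS["target_sequences"] are
# for illustration only.
from nprlib.workflow.genetree import pipeline
from nprlib.utils import GLOBALS  # assumed location of the GLOBALS registry

def run_thread(conf, wkname="genetree"):
    # With task=None, pipeline() creates the root Msf task covering all
    # registered target sequences and returns it as the first pending task.
    pending = pipeline(None, wkname, conf)
    while pending:
        task = pending.pop()
        execute_jobs(task)  # hypothetical: run the task's external jobs
        # Feeding the finished task back yields the next NPR nodes, if any.
        pending.extend(pipeline(task, task.target_wkname))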
def load_jobs(self):
    # I want a single phylogenetic tree for each COG
    from nprlib.workflow.genetree import pipeline
    for co in self.cogs:
        # Register a new Msf task for each COG, using the same
        # config file but opening a new tree reconstruction
        # thread.
        job = Msf(set(co), set(), seqtype=self.seqtype)
        job.main_tree = None
        job.threadid = generate_runid()
        job.configid = self.conf["_configid"]
        # This converts the job into a workflow job. As soon as a
        # task is done, it will be automatically processed and the
        # new tasks will be registered as new jobs.
        job.task_processor = pipeline
        job.target_wkname = self.genetree_workflow
        self.jobs.append(job)
        self.cog_ids.add(job.nodeid)
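# --- Illustrative sketch (not part of nprlib): because load_jobs() assigns
# pipeline as each job's task_processor, a scheduler can hand any finished
# COG task back through the same entry point. The on_task_done() name and
# its conf argument are assumptions for illustration only.
def on_task_done(finished_task, conf=None):
    # Each per-COG Msf task carries its own threadid and configid, so
    # pipeline() resumes the matching gene-tree thread and returns the
    # follow-up tasks to be queued.
    return finished_task.task_processor(finished_task,
                                        finished_task.target_wkname,
                                        conf)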
# Tail of process_task(): decide whether to spawn new NPR nodes.
if npr_conf.max_iters and current_iter >= npr_conf.max_iters:
    log.warning("Maximum number of iterations reached!")
else:
    # Add new nodes
    source_seqtype = "aa" if "aa" in GLOBALS["seqtypes"] else "nt"
    ttree, mtree = task.task_tree, task.main_tree
    log.log(26, "Processing tree: %s seqs, %s outgroups",
            len(target_seqs), len(out_seqs))
    alg_path = node_info.get("clean_alg_path", node_info["alg_path"])
    for node, seqs, outs, wkname in get_next_npr_node(threadid, ttree,
                                                      task.out_seqs, mtree,
                                                      alg_path, npr_conf):
        log.log(24, "Registering new node: %s seqs, %s outgroups",
                len(seqs), len(outs))
        new_task_node = Msf(seqs, outs, seqtype=source_seqtype)
        new_task_node.target_wkname = wkname
        new_tasks.append(new_task_node)

return new_tasks