def _predict_hook(self, my_task):
    """Predict the children generated by this multi-instance split.

    The number of instances is read from ``self.times`` (evaluated against
    *my_task*); if it cannot be determined yet, prediction is skipped.
    """
    split_n = valueof(my_task, self.times)
    if split_n is None:
        return
    my_task._set_internal_attribute(splits = split_n)

    # Each outgoing task spec appears once per split instance.
    outputs = self.outputs * split_n

    # A LIKELY parent produces LIKELY children; otherwise they are FUTURE.
    child_state = Task.LIKELY if my_task._has_state(Task.LIKELY) else Task.FUTURE
    my_task._update_children(outputs, child_state)
def try_fire(self, my_task):
    """Return True when enough threads have completed for this merge to fire.

    Raises WorkflowException if the associated split task was never reached.
    """
    # Nothing further to decide once the merge has fired or is poised to.
    if my_task._has_state(Task.COMPLETED):
        return False
    if my_task._has_state(Task.READY):
        return True

    # Locate the ancestor task that performed the conditional parallel
    # split, and ask it which threads it activated.
    split_node = my_task._find_ancestor_from_name(self.split_task)
    if split_node is None:
        msg = 'Join with %s, which was not reached' % self.split_task
        raise WorkflowException(self, msg)
    nodes = split_node.spec._get_activated_threads(split_node)

    # Unless a threshold is configured, every started thread must finish.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(nodes)

    # Partition the threads into finished and still-waiting, refreshing
    # each thread's path prediction along the way.
    waiting_nodes = []
    completed = 0
    for node in nodes:
        node.spec._predict(node)
        if self._branch_is_complete(node):
            completed += 1
        else:
            waiting_nodes.append(node)

    if completed < threshold:
        # Deliberately NOT marked COMPLETED: if the outstanding branches
        # get cancelled or never reach this ThreadMerge (e.g. a stub
        # branch), this check must be able to run again.
        return False

    # Threshold reached. A cancelling merge aborts every branch that has
    # not completed yet.
    if self.cancel_remaining:
        for node in waiting_nodes:
            node.cancel()
    return True
def _try_fire_structured(self, my_task, force = False):
    """Return True when this structured join may fire, firing it if so.

    Raises WorkflowException if the associated split task was never reached.
    """
    # Already poised to fire, or already fired?
    if my_task._has_state(Task.READY):
        return True
    if my_task._has_state(Task.COMPLETED):
        return False

    # Locate the ancestor that opened the branches feeding this join, and
    # ask it which branch tasks it activated toward us.
    split_task = my_task._find_ancestor_from_name(self.split_task)
    if split_task is None:
        msg = 'Join with %s, which was not reached' % self.split_task
        raise WorkflowException(self, msg)
    tasks = split_task.spec._get_activated_tasks(split_task, my_task)

    # Unless a threshold is configured, every started branch is required.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(tasks)

    # A branch counts toward the threshold if it can never reach this
    # join, or if it already finished; everyone else is still waiting.
    waiting_tasks = []
    completed = 0
    for task in tasks:
        task.spec._predict(task)  # refresh path prediction first
        if not self._branch_may_merge_at(task) or self._branch_is_complete(task):
            completed += 1
        else:
            waiting_tasks.append(task)

    if force or completed >= threshold:
        self._fire(my_task, waiting_tasks)
        return True

    # Deliberately NOT marked COMPLETED: if the outstanding branches get
    # cancelled or never reach this Join (e.g. a stub branch), this check
    # must be able to run again.
    return False
def _on_ready_before_hook(self, my_task):
    """Instantiate the subworkflow and graft its tree into this job.

    Loads a workflow definition from ``self.file`` (evaluated against
    *my_task*), creates a sub-job parented to the outer job, and splices
    the sub-job's root children in front of this task's own children so
    the subworkflow runs as part of this job's task tree.

    Returns True so the task continues into the ready state.
    """
    # Resolve the definition file; the value may depend on task attributes.
    file = valueof(my_task, self.file)
    xml_reader = XmlReader()
    workflow_list = xml_reader.parse_file(file)
    # NOTE(review): only the first workflow found in the file is used.
    workflow = workflow_list[0]
    outer_job = my_task.job.outer_job
    subjob = SpiffWorkflow.Job(workflow, parent = outer_job)
    # Get notified when the subjob finishes so this task can complete.
    subjob.signal_connect('completed', self._on_subjob_completed, my_task)

    # Integrate the tree of the subjob into the tree of this job.
    my_task._update_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child._inherit_attributes()
    # Repeated insert(0, ...) puts the subjob's children ahead of this
    # task's own outputs (and reverses their relative order).
    for child in subjob.task_tree.children:
        my_task.children.insert(0, child)
        child.parent = my_task
    my_task._set_internal_attribute(subjob = subjob)
    return True
def _try_fire_unstructured(self, my_task, force = False):
    """Return True when this unstructured join may fire, firing it if so.

    Unlike the structured variant, there is no known split task; instead
    the whole task tree is searched for instances of this join's input
    specs within the current thread.
    """
    # Nothing further to decide once the join has fired or is poised to.
    if my_task._has_state(Task.COMPLETED):
        return False
    if my_task._has_state(Task.READY):
        return True

    # Unless a threshold is configured, every input spec is required.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(self.inputs)

    # Look at the tree to find all places where this task is used.
    # (Renamed loop variable: "input" shadowed the builtin.)
    tasks = []
    thread_id = my_task.thread_id
    for input_spec in self.inputs:
        for task in my_task.job.task_tree:
            if task.thread_id != thread_id:
                continue
            if task.spec != input_spec:
                continue
            tasks.append(task)

    # Look up which tasks have already completed. A task without a parent
    # counts as completed (it cannot be waiting on anything).
    waiting_tasks = []
    completed = 0
    for task in tasks:
        if task.parent is None or task._has_state(Task.COMPLETED):
            completed += 1
        else:
            waiting_tasks.append(task)

    # If the threshold was reached, get ready to fire.
    if force or completed >= threshold:
        self._fire(my_task, waiting_tasks)
        return True

    # We do NOT set the task state to COMPLETED, because in case all other
    # incoming tasks get cancelled (or never reach the Join for other
    # reasons, such as reaching a stub branch), we need to revisit it.
    return False