def _send_call(self, my_task):
    """Sends the Celery asynchronous call and stores the async call
    information for later retrieval."""
    args, kwargs, queue = [], {}, None
    if self.args:
        args = _eval_args(self.args, my_task)
    if self.kwargs:
        kwargs = _eval_kwargs(self.kwargs, my_task)
    if self.call_server_id:
        queue = 'server.{0}'.format(valueof(my_task, self.call_server_id))
    elif self.call_queue:
        queue = valueof(my_task, self.call_queue)
    LOG.debug("%s (task id %s) calling %s", self.name, my_task.id, self.call,
              extra=dict(data=dict(args=args, kwargs=kwargs)))
    # Add current workflow information.
    kwargs['workflow'] = {'data': my_task.workflow.data}
    async_call = current_app().send_task(self.call, args=args, kwargs=kwargs,
                                         queue=queue)
    my_task.internal_data['task_id'] = async_call.task_id
    my_task.internal_data['async_call'] = async_call
    LOG.debug("'%s' called: %s", self.call, async_call.task_id)

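# A minimal sketch of the dispatch path used above, assuming a plain Celery
# app; the broker URL, task name, and queue name are hypothetical stand-ins.
# send_task() dispatches by task name without importing the task module, which
# is the same Celery API the hook reaches via current_app().
from celery import Celery as CeleryApp

app = CeleryApp('tasks', broker='redis://localhost:6379/0')  # hypothetical broker
result = app.send_task('tasks.add', args=[2, 2],
                       kwargs={'workflow': {'data': {}}},
                       queue='server.worker-1')
print(result.task_id)  # the id stored in my_task.internal_data['task_id'] above
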
def _try_fire_unstructured(self, my_task, force=False):
    # The default threshold is the number of inputs.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(self.inputs)

    # Look at the tree to find all places where this task is used.
    tasks = []
    for input in self.inputs:
        for task in my_task.workflow.task_tree:
            if task.thread_id != my_task.thread_id:
                continue
            if task.task_spec != input:
                continue
            tasks.append(task)

    # Look up which tasks have already completed.
    waiting_tasks = []
    completed = 0
    for task in tasks:
        if task.parent is None or task._has_state(Task.COMPLETED):
            completed += 1
        else:
            waiting_tasks.append(task)

    # If the threshold was reached, get ready to fire.
    return force or completed >= threshold, waiting_tasks

def _try_fire_structured(self, my_task, force=False):
    # Retrieve a list of all activated tasks from the associated
    # task that did the conditional parallel split.
    split_task = my_task._find_ancestor_from_name(self.split_task)
    if split_task is None:
        msg = 'Join with %s, which was not reached' % self.split_task
        raise WorkflowException(self, msg)
    tasks = split_task.task_spec._get_activated_tasks(split_task, my_task)

    # The default threshold is the number of branches that were started.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(tasks)

    # Look up which tasks have already completed.
    waiting_tasks = []
    completed = 0
    for task in tasks:
        # Refresh path prediction.
        task.task_spec._predict(task)

        if not self._branch_may_merge_at(task):
            completed += 1
        elif self._branch_is_complete(task):
            completed += 1
        else:
            waiting_tasks.append(task)

    # If the threshold was reached, get ready to fire.
    return force or completed >= threshold, waiting_tasks

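# A self-contained sketch of the threshold rule shared by the _try_fire_*
# variants above; the boolean flags stand in for _branch_is_complete()
# results, and everything here is hypothetical illustration, not library API.
def _would_fire(branch_complete_flags, threshold=None, force=False):
    # Default threshold: every activated branch must complete.
    if threshold is None:
        threshold = len(branch_complete_flags)
    completed = sum(1 for done in branch_complete_flags if done)
    return force or completed >= threshold

assert not _would_fire([True, True, False])            # 2 of 3 done, threshold 3
assert _would_fire([True, False, False], threshold=1)  # discriminator-style join
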
def _create_subworkflow(self, my_task):
    from SpiffWorkflow.specs import WorkflowSpec
    file = valueof(my_task, self.file)
    serializer = self.serializer_cls()
    s_state = open(file).read()
    wf_spec = WorkflowSpec.deserialize(serializer, s_state, filename=file)
    outer_workflow = my_task.workflow.outer_workflow
    return SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)

def _create_subworkflow(self, my_task):
    from SpiffWorkflow.storage import XmlSerializer
    from SpiffWorkflow.specs import WorkflowSpec
    file = valueof(my_task, self.file)
    serializer = XmlSerializer()
    xml = open(file).read()
    wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=file)
    outer_workflow = my_task.workflow.outer_workflow
    return SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)

def _eval_args(args, my_task):
    """Parses args and evaluates any Attrib entries"""
    results = []
    for arg in args:
        if isinstance(arg, (Attrib, PathAttrib)):
            results.append(valueof(my_task, arg))
        else:
            results.append(arg)
    return results

def _eval_kwargs(kwargs, my_task):
    """Parses kwargs and evaluates any Attrib entries"""
    results = {}
    for kwarg, value in kwargs.items():
        if isinstance(value, (Attrib, PathAttrib)):
            results[kwarg] = valueof(my_task, value)
        else:
            results[kwarg] = value
    return results

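# A self-contained sketch of the placeholder resolution performed by the two
# helpers above, using a stub operator and a plain dict in place of a live
# task; _Attrib, _valueof, and the data values are hypothetical.
class _Attrib(object):
    def __init__(self, name):
        self.name = name

def _valueof(task_data, arg):
    return task_data[arg.name]

def _demo_eval_args(args, task_data):
    # Mirrors _eval_args: operator entries are resolved, literals pass through.
    return [_valueof(task_data, a) if isinstance(a, _Attrib) else a
            for a in args]

print(_demo_eval_args([_Attrib('user_id'), 'literal'], {'user_id': 42}))
# -> [42, 'literal']
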
def _predict_hook(self, my_task):
    split_n = valueof(my_task, self.times)
    if split_n is None:
        return
    my_task._set_internal_attribute(splits=split_n)

    # Create the outgoing tasks.
    outputs = []
    for i in range(split_n):
        outputs += self.outputs
    if my_task._is_definite():
        my_task._sync_children(outputs, Task.FUTURE)
    else:
        my_task._sync_children(outputs, Task.LIKELY)

def _predict_hook(self, my_task):
    split_n = valueof(my_task, self.times)
    if split_n is None:
        return
    my_task._set_internal_data(splits=split_n)

    # Create the outgoing tasks.
    outputs = []
    for i in range(split_n):
        outputs += self.outputs
    if my_task._is_definite():
        my_task._sync_children(outputs, Task.FUTURE)
    else:
        my_task._sync_children(outputs, Task.LIKELY)

def _predict_hook(self, my_task):
    split_n = valueof(my_task, self.times)
    if split_n is None:
        return
    my_task._set_internal_attribute(splits=split_n)

    # Create the outgoing tasks.
    outputs = []
    for i in range(split_n):
        outputs += self.outputs
    if my_task._has_state(Task.LIKELY):
        child_state = Task.LIKELY
    else:
        child_state = Task.FUTURE
    my_task._update_children(outputs, child_state)

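# Quick illustration of the fan-out performed by the _predict_hook variants
# above, with hypothetical output specs: times=3 repeats the outgoing task
# list once per future instance.
outputs = []
for i in range(3):                # split_n = 3
    outputs += ['taskA', 'taskB']
assert len(outputs) == 6
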
def _try_fire(self, my_task):
    # If the threshold was already reached, there is nothing else to do.
    if my_task._has_state(Task.COMPLETED):
        return False
    if my_task._has_state(Task.READY):
        return True

    # Retrieve a list of all activated tasks from the associated
    # task that did the conditional parallel split.
    split_task = my_task._find_ancestor_from_name(self.split_task)
    if split_task is None:
        msg = 'Join with %s, which was not reached' % self.split_task
        raise WorkflowException(self, msg)
    tasks = split_task.task_spec._get_activated_threads(split_task)

    # The default threshold is the number of threads that were started.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(tasks)

    # Look up which tasks have already completed.
    waiting_tasks = []
    completed = 0
    for task in tasks:
        # Refresh path prediction.
        task.task_spec._predict(task)

        if self._branch_is_complete(task):
            completed += 1
        else:
            waiting_tasks.append(task)

    # If the threshold was reached, get ready to fire.
    if completed >= threshold:
        # If this is a cancelling join, cancel all incoming branches,
        # except for the one that just completed.
        if self.cancel_remaining:
            for task in waiting_tasks:
                task.cancel()
        return True

    # We do NOT set the task state to COMPLETED, because in
    # case all other incoming tasks get cancelled (or never reach
    # the ThreadMerge for other reasons, such as reaching a stub branch),
    # we need to revisit it.
    return False

def _on_ready_before_hook(self, my_task):
    file = valueof(my_task, self.file)
    xml_reader = XmlReader()
    workflow_list = xml_reader.parse_file(file)
    workflow = workflow_list[0]
    outer_workflow = my_task.workflow.outer_workflow
    subworkflow = SpiffWorkflow.Workflow(workflow, parent=outer_workflow)
    subworkflow.completed_event.connect(self._on_subworkflow_completed,
                                        my_task)

    # Integrate the tree of the subworkflow into the tree of this workflow.
    my_task._update_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child._inherit_attributes()
    for child in subworkflow.task_tree.children:
        my_task.children.insert(0, child)
        child.parent = my_task
    my_task._set_internal_attribute(subworkflow=subworkflow)
    return True

def _try_fire_unstructured(self, my_task, force=False):
    # If the threshold was already reached, there is nothing else to do.
    if my_task._has_state(Task.COMPLETED):
        return False
    if my_task._has_state(Task.READY):
        return True

    # The default threshold is the number of inputs.
    threshold = valueof(my_task, self.threshold)
    if threshold is None:
        threshold = len(self.inputs)

    # Look at the tree to find all places where this task is used.
    tasks = []
    for input in self.inputs:
        for task in my_task.workflow.task_tree:
            if task.thread_id != my_task.thread_id:
                continue
            if task.task_spec != input:
                continue
            tasks.append(task)

    # Look up which tasks have already completed.
    waiting_tasks = []
    completed = 0
    for task in tasks:
        if task.parent is None or task._has_state(Task.COMPLETED):
            completed += 1
        else:
            waiting_tasks.append(task)

    # If the threshold was reached, get ready to fire.
    if force or completed >= threshold:
        self._fire(my_task, waiting_tasks)
        return True

    # We do NOT set the task state to COMPLETED, because in
    # case all other incoming tasks get cancelled (or never reach
    # the Join for other reasons, such as reaching a stub branch),
    # we need to revisit it.
    return False

def _on_ready_before_hook(self, my_task):
    from SpiffWorkflow.storage import XmlSerializer
    from SpiffWorkflow.specs import WorkflowSpec
    file = valueof(my_task, self.file)
    serializer = XmlSerializer()
    xml = open(file).read()
    wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=file)
    outer_workflow = my_task.workflow.outer_workflow
    subworkflow = SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
    subworkflow.completed_event.connect(self._on_subworkflow_completed,
                                        my_task)

    # Integrate the tree of the subworkflow into the tree of this workflow.
    my_task._sync_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child.task_spec._update_state(child)
        child._inherit_attributes()
    for child in subworkflow.task_tree.children:
        my_task.children.insert(0, child)
        child.parent = my_task
    my_task._set_internal_attribute(subworkflow=subworkflow)

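# How a spec file might be loaded and run standalone, mirroring the imports in
# the hook above; the file name is hypothetical and complete_all() is assumed
# from the legacy SpiffWorkflow Workflow API.
import SpiffWorkflow
from SpiffWorkflow.storage import XmlSerializer
from SpiffWorkflow.specs import WorkflowSpec

xml = open('subworkflow.xml').read()  # hypothetical spec file
wf_spec = WorkflowSpec.deserialize(XmlSerializer(), xml,
                                   filename='subworkflow.xml')
workflow = SpiffWorkflow.Workflow(wf_spec)
workflow.complete_all()  # run every task that becomes ready
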
def _eval_assign(assign, scope):
    if isinstance(assign, FuncAttrib):
        return valueof(scope, assign)
    else:
        return dict((left, valueof(scope, right))
                    for left, right in assign.items())

def get_name_for(self, task):
    return str(valueof(task, self.name))