def __init__(self, parent, name, **kwargs):
    """
    Constructor. May also have properties/attributes passed.

    Properties assigned via ``property_assign`` are task-local (invisible
    to other tasks), unlike ``pre_assign``/``post_assign``. "Defines" are
    properties that, once defined, can no longer be modified.

    @type  parent: L{SpiffWorkflow.specs.WorkflowSpec}
    @param parent: A reference to the parent (usually a workflow).
    @type  name: string
    @param name: A name for the task.
    @type  lock: list(str)
    @keyword lock: A list of mutex names. The mutex is acquired on entry of
                   execute() and released on leave of execute().
    @type  property_assign: list((str, object))
    @keyword property_assign: a list of name/value pairs
    @type  pre_assign: list((str, object))
    @keyword pre_assign: a list of name/value pairs
    @type  post_assign: list((str, object))
    @keyword post_assign: a list of name/value pairs
    """
    assert parent is not None
    assert name is not None

    # Identity and position in the spec graph.
    self._parent = parent
    self.id = None
    self.name = str(name)
    self.description = kwargs.get('description', '')
    self.inputs = []
    self.outputs = []

    # Behavioral flags.
    self.manual = False
    self.internal = False  # Only for easing debugging.
    self.cancelled = False

    # Data attached to the spec.
    self.properties = kwargs.get('properties', {})
    self.defines = kwargs.get('defines', {})
    self.pre_assign = kwargs.get('pre_assign', [])
    self.post_assign = kwargs.get('post_assign', [])
    self.locks = kwargs.get('lock', [])
    self.lookahead = 2  # Maximum number of MAYBE predictions.

    # Events.
    self.entered_event = Event()
    self.reached_event = Event()
    self.ready_event = Event()
    self.completed_event = Event()

    # Registering with the parent assigns self.id.
    self._parent._add_notify(self)
    self.properties.update(self.defines)
    assert self.id is not None
def __init__(self, workflow_spec, **kwargs):
    """
    Constructor.

    :type  workflow_spec: L{SpiffWorkflow.specs.WorkflowSpec}
    :param workflow_spec: The spec that describes this workflow instance.
    :keyword parent: The outer workflow, if this instance is a subworkflow;
                     defaults to self.
    """
    assert workflow_spec is not None
    self.spec = workflow_spec
    self.task_id_assigner = TaskIdAssigner()
    self.attributes = {}
    self.outer_workflow = kwargs.get('parent', self)
    self.locks = {}
    self.last_task = None
    self.task_tree = Task(self, specs.Simple(workflow_spec, 'Root'))
    self.success = True
    self.debug = False

    # Events.
    self.completed_event = Event()

    # Prevent the root task from being executed.
    self.task_tree.state = Task.COMPLETED
    start = self.task_tree._add_child(self.spec.start)

    self.spec.start._predict(start)
    # Fix: dict.has_key() was removed in Python 3; use the `in` operator
    # (consistent with the other Workflow constructor in this file).
    if 'parent' not in kwargs:
        start.task_spec._update_state(start)
def __init__(self, parent, name, **kwargs):
    """
    Constructor.

    Data assigned via the ``data`` argument is task-local, i.e. not
    visible to other tasks, unlike ``pre_assign``/``post_assign``.
    "Defines" are spec data fields that, once defined, can no longer be
    modified.

    :type  parent: L{SpiffWorkflow.specs.WorkflowSpec}
    :param parent: A reference to the parent (usually a workflow).
    :type  name: string
    :param name: A name for the task.
    :type  lock: list(str)
    :param lock: A list of mutex names. The mutex is acquired on entry of
                 execute() and released on leave of execute().
    :type  data: dict((str, object))
    :param data: name/value pairs
    :type  defines: dict((str, object))
    :param defines: name/value pairs
    :type  pre_assign: list((str, object))
    :param pre_assign: a list of name/value pairs
    :type  post_assign: list((str, object))
    :param post_assign: a list of name/value pairs
    """
    assert parent is not None
    assert name is not None

    # Identity and position in the spec graph.
    self._parent = parent
    self.id = None
    self.name = name
    self.description = kwargs.get('description', '')
    self.inputs = []
    self.outputs = []

    # Behavioral flags.
    self.manual = False
    self.internal = False  # Only for easing debugging.

    # Data attached to the spec.
    self.data = kwargs.get('data', {})
    self.defines = kwargs.get('defines', {})
    self.pre_assign = kwargs.get('pre_assign', [])
    self.post_assign = kwargs.get('post_assign', [])
    self.locks = kwargs.get('lock', [])
    self.lookahead = 2  # Maximum number of MAYBE predictions.

    # Events.
    self.entered_event = Event()
    self.reached_event = Event()
    self.ready_event = Event()
    self.completed_event = Event()
    self.cancelled_event = Event()
    self.finished_event = Event()

    # Error handling.
    self.error_handlers = []

    # Registering with the parent assigns self.id.
    self._parent._add_notify(self)
    self.data.update(self.defines)
    assert self.id is not None
def __init__(self, workflow_spec, deserializing=False, task_class=None,
             **kwargs):
    """
    Constructor.

    :type  workflow_spec: L{SpiffWorkflow.specs.WorkflowSpec}
    :param workflow_spec: The spec that describes this workflow instance.
    :param deserializing: set to true when deserializing to avoid
        generating tasks twice (and associated problems with multiple
        hierarchies of tasks)
    :param task_class: The class used to instantiate tasks; inherited
        from the parent workflow (if any) when not given.
    :keyword parent: The outer workflow, if this instance is a
        subworkflow; defaults to self.
    """
    assert workflow_spec is not None
    # Fix: pass lazy %-style args to the logger instead of eager string
    # formatting, so the message is only built if DEBUG is enabled.
    LOG.debug("__init__ Workflow instance: %s", self)
    self.spec = workflow_spec
    self.data = {}
    self.outer_workflow = kwargs.get('parent', self)
    self.locks = {}
    self.last_task = None

    # Resolve the task class: explicit argument wins, then the parent's
    # class, then the default Task.
    if task_class:
        self.task_class = task_class
    elif 'parent' in kwargs:
        self.task_class = kwargs['parent'].task_class
    else:
        self.task_class = Task

    if deserializing:
        # The root spec must already exist when rebuilding from a
        # serialized workflow.
        assert 'Root' in workflow_spec.task_specs
        root = workflow_spec.task_specs['Root']
    else:
        root = workflow_spec.task_specs.get('Root')
        if root is None:
            root = specs.Simple(workflow_spec, 'Root')
    self.task_tree = self.task_class(self, root)
    self.success = True
    self.debug = False

    # Events.
    self.completed_event = Event()

    # Prevent the root task from being executed.
    self.task_tree.state = Task.COMPLETED
    start = self.task_tree._add_child(self.spec.start, state=Task.FUTURE)

    self.spec.start._predict(start)
    if 'parent' not in kwargs:
        start.task_spec._update_state(start)