def setup_workflow(self, structured=True, threshold=None, cancel=False):
    """Build a Workflow with a split feeding a manual task and a choice,
    both converging on a Join that leads to the end task.

    :param structured: if True, the Join is created with an explicit
        split_task reference; otherwise it is unstructured.
    :param threshold: passed through to the Join.
    :param cancel: passed through to the Join.
    :returns: the constructed Workflow instance.
    """
    wf_spec = WorkflowSpec()

    split = Simple(wf_spec, 'split')
    wf_spec.start.connect(split)

    if structured:
        join = Join(wf_spec,
                    'join',
                    threshold=threshold,
                    split_task=split.name,
                    cancel=cancel)
    else:
        join = Join(wf_spec, 'join', threshold=threshold, cancel=cancel)

    single = Simple(wf_spec, 'first', manual=True)
    default = Simple(wf_spec, 'default')
    choice = ExclusiveChoice(wf_spec, 'choice', manual=True)
    end = Simple(wf_spec, 'end')

    # The manual task reaches the join unconditionally; the choice only
    # routes to the join when 'should_join' is True, else to the default.
    single.connect(join)
    choice.connect_if(Equal(Attrib('should_join'), True), join)
    choice.connect(default)

    split.connect(single)
    split.connect(choice)
    join.connect(end)

    return Workflow(wf_spec)
def testSerializationWithoutKwargs(self):
    """Round-trip Celery specs through DictionarySerializer and check
    that kwargs survive (or are correctly routed to TaskSpec defines)."""
    new_wf_spec = WorkflowSpec()
    serializer = DictionarySerializer()

    # No extra kwargs at all: kwargs dicts must match after round-trip.
    nokw = Celery(self.wf_spec, 'testnokw', 'call.name',
                  call_args=[Attrib('the_attribute'), 1])
    data = nokw.serialize(serializer)
    nokw2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertDictEqual(nokw.kwargs, nokw2.kwargs)

    # A plain dict-valued kwarg must also survive the round-trip.
    kw = Celery(self.wf_spec, 'testkw', 'call.name',
                call_args=[Attrib('the_attribute'), 1],
                some_arg={"key": "value"})
    data = kw.serialize(serializer)
    kw2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertDictEqual(kw.kwargs, kw2.kwargs)

    # Has kwargs, but they belong to TaskSpec
    kw_defined = Celery(self.wf_spec, 'testkwdef', 'call.name',
                        call_args=[Attrib('the_attribute'), 1],
                        some_ref=Attrib('value'),
                        defines={"key": "value"})
    data = kw_defined.serialize(serializer)
    kw_defined2 = Celery.deserialize(serializer, new_wf_spec, data)
    # An Attrib-valued kwarg must come back as an Attrib, not a string.
    self.assertIsInstance(kw_defined2.kwargs['some_ref'], Attrib)

    # Build pickled+base64 args, the wire format the serializer expects.
    args = [b64encode(pickle.dumps(v))
            for v in [Attrib('the_attribute'), 'ip',
                      'dc455016e2e04a469c01a866f11c0854']]

    data = {'R': b64encode(pickle.dumps('1'))}
    # Comes from live data. Bug not identified, but there we are...
    # NOTE: the inner 'data' and 'defines' keys below reference the
    # {'R': ...} dict bound just above, before this literal rebinds the
    # name 'data'.
    data = {'inputs': ['Wait:1'], 'lookahead': 2, 'description': '',
            'outputs': [], 'args': args, 'manual': False,
            'data': data, 'locks': [], 'pre_assign': [],
            'call': 'call.x', 'internal': False, 'post_assign': [],
            'id': 8, 'result_key': None, 'defines': data,
            'class': 'SpiffWorkflow.specs.Celery.Celery',
            'name': 'RS1:1'}
    # Must deserialize without raising.
    Celery.deserialize(serializer, new_wf_spec, data)
def _create_subworkflow(self, my_task):
    """Create a sub-workflow for the given task.

    The sub-workflow's spec is named after the task, its start task is
    wired to every output of this spec, and the new Workflow is parented
    on the task's outer workflow.
    """
    # Imported locally to avoid a circular import at module load time.
    from SpiffWorkflow.specs import WorkflowSpec
    subspec = WorkflowSpec(my_task.get_name())
    for output_spec in self.outputs:
        subspec.start.connect(output_spec)
    return SpiffWorkflow.Workflow(subspec,
                                  parent=my_task.workflow.outer_workflow)
def create_worlflow():
    """Create a spec whose first task raises TaskError('test') when its
    parent has fewer than two children.

    NOTE(review): the function name is misspelled ('worlflow') but is
    kept unchanged for existing callers.

    :returns: a (spec, task) tuple.
    """
    spec = WorkflowSpec()
    transform_code = (
        'from SpiffWorkflow.exceptions import TaskError\n'
        'if len(my_task.parent.children) < 2: raise TaskError("test")'
    )
    a1 = Transform(spec, 'do what you want', [transform_code])
    a1.follow(spec.start)
    return spec, a1
def __init__(self):
    """Set up serializer mode, caches, the workflow class/spec, and the
    user/role model classes resolved from settings paths."""
    self.use_compact_serializer = True
    self.wf_activities = {}
    self.workflow_spec_cache = {}
    # The workflow *class* (not an instance); instantiated elsewhere.
    self.workflow = BpmnWorkflow
    self.workflow_spec = WorkflowSpec()
    # Model classes are resolved dynamically from dotted-path settings.
    self.user_model = get_object_from_path(settings.USER_MODEL)
    self.role_model = get_object_from_path(settings.ROLE_MODEL)
def testTree(self):
    """Build a task tree by hand, then verify both get_dump() output
    (via a regex, since task ids are not fixed) and the state-filtered
    Task.Iterator."""
    # Build a tree.
    spec = WorkflowSpec()
    workflow = MockWorkflow()
    task1 = Simple(spec, 'Simple 1')
    task2 = Simple(spec, 'Simple 2')
    task3 = Simple(spec, 'Simple 3')
    task4 = Simple(spec, 'Simple 4')
    task5 = Simple(spec, 'Simple 5')
    task6 = Simple(spec, 'Simple 6')
    task7 = Simple(spec, 'Simple 7')
    task8 = Simple(spec, 'Simple 8')
    task9 = Simple(spec, 'Simple 9')
    root = Task(workflow, task1)
    c1 = root._add_child(task2)
    c11 = c1._add_child(task3)
    c111 = c11._add_child(task4)
    c1111 = Task(workflow, task5, c111)
    c112 = Task(workflow, task6, c11)
    c12 = Task(workflow, task7, c1)
    c2 = Task(workflow, task8, root)
    c3 = Task(workflow, task9, root)
    c3.state = Task.COMPLETED

    # Check whether the tree is built properly.
    # '!' is a placeholder for the (unpredictable) task id, replaced
    # below with a regex group before compiling.
    expected = """!/0: Task of Simple 1 State: MAYBE Children: 3
  !/0: Task of Simple 2 State: MAYBE Children: 2
    !/0: Task of Simple 3 State: MAYBE Children: 2
      !/0: Task of Simple 4 State: MAYBE Children: 1
        !/0: Task of Simple 5 State: MAYBE Children: 0
      !/0: Task of Simple 6 State: MAYBE Children: 0
    !/0: Task of Simple 7 State: MAYBE Children: 0
  !/0: Task of Simple 8 State: MAYBE Children: 0
  !/0: Task of Simple 9 State: COMPLETED Children: 0"""
    expected = re.compile(expected.replace('!', r'([0-9a-f\-]+)'))
    self.assertTrue(
        expected.match(root.get_dump()),
        'Expected:\n' + repr(expected.pattern) + '\n' +
        'but got:\n' + repr(root.get_dump()))

    # Now remove one line from the expected output for testing the
    # filtered iterator.
    expected2 = ''
    for line in expected.pattern.split('\n'):
        if line.find('Simple 9') >= 0:
            continue
        expected2 += line.lstrip() + '\n'
    expected2 = re.compile(expected2)

    # Run the iterator test.
    result = ''
    for thetask in Task.Iterator(root, Task.MAYBE):
        result += thetask.get_dump(0, False) + '\n'
    self.assertTrue(
        expected2.match(result),
        'Expected:\n' + repr(expected2.pattern) + '\n' +
        'but got:\n' + repr(result))
def testTree(self):
    """Build a task tree by hand, then verify get_dump() output and the
    state-filtered Task.Iterator.

    Fix: replace the deprecated ``self.assert_`` alias (removed in
    Python 3.12) with ``assertEqual``, which also produces a useful diff
    on failure instead of a bare truth-check message.
    """
    # Build a tree.
    spec = WorkflowSpec()
    workflow = MockWorkflow()
    task1 = Simple(spec, 'Simple 1')
    task2 = Simple(spec, 'Simple 2')
    task3 = Simple(spec, 'Simple 3')
    task4 = Simple(spec, 'Simple 4')
    task5 = Simple(spec, 'Simple 5')
    task6 = Simple(spec, 'Simple 6')
    task7 = Simple(spec, 'Simple 7')
    task8 = Simple(spec, 'Simple 8')
    task9 = Simple(spec, 'Simple 9')
    root = Task(workflow, task1)
    c1 = root._add_child(task2)
    c11 = c1._add_child(task3)
    c111 = c11._add_child(task4)
    c1111 = Task(workflow, task5, c111)
    c112 = Task(workflow, task6, c11)
    c12 = Task(workflow, task7, c1)
    c2 = Task(workflow, task8, root)
    c3 = Task(workflow, task9, root)
    c3.state = Task.COMPLETED

    # Check whether the tree is built properly.
    expected = """1/0: Task of Simple 1 State: FUTURE Children: 3
  2/0: Task of Simple 2 State: FUTURE Children: 2
    3/0: Task of Simple 3 State: FUTURE Children: 2
      4/0: Task of Simple 4 State: FUTURE Children: 1
        5/0: Task of Simple 5 State: FUTURE Children: 0
      6/0: Task of Simple 6 State: FUTURE Children: 0
    7/0: Task of Simple 7 State: FUTURE Children: 0
  8/0: Task of Simple 8 State: FUTURE Children: 0
  9/0: Task of Simple 9 State: COMPLETED Children: 0"""
    self.assertEqual(expected, root.get_dump(),
                     'Expected:\n' + repr(expected) + '\n' +
                     'but got:\n' + repr(root.get_dump()))

    # Now remove one line from the expected output for testing the
    # filtered iterator.
    expected2 = ''
    for line in expected.split('\n'):
        if line.find('Simple 9') >= 0:
            continue
        expected2 += line.lstrip() + '\n'

    # Run the iterator test.
    result = ''
    for thetask in Task.Iterator(root, Task.FUTURE):
        result += thetask.get_dump(0, False) + '\n'
    self.assertEqual(expected2, result,
                     'Expected:\n' + expected2 + '\n' +
                     'but got:\n' + result)
def testSerialize(self):
    """Serialize the spec, deserialize into a fresh WorkflowSpec, and
    verify that re-serializing yields identical data.

    Fix: replace the deprecated ``self.assert_`` alias (removed in
    Python 3.12) with ``assertIsInstance``, which also reports the
    actual type on failure.
    """
    serializer = DictionarySerializer()
    spec = self.create_instance()
    serialized = spec.serialize(serializer)
    self.assertIsInstance(serialized, dict)
    new_wf_spec = WorkflowSpec()
    new_spec = spec.__class__.deserialize(serializer, new_wf_spec,
                                          serialized)
    # Round-trip check: serializing the reconstructed spec must produce
    # exactly the same dictionary as the original.
    before = spec.serialize(serializer)
    after = new_spec.serialize(serializer)
    self.assertEqual(before, after,
                     'Before:\n%s\nAfter:\n%s\n' % (before, after))
def testSerialize(self):
    """Round-trip the spec through DictionarySerializer.

    Specs that do not implement serialization must be plain TaskSpecs
    and must raise NotImplementedError on deserialize as well.
    """
    serializer = DictionarySerializer()
    spec = self.create_instance()
    try:
        serialized = spec.serialize(serializer)
        self.assertIsInstance(serialized, dict)
    except NotImplementedError:
        # Unserializable specs: both directions must refuse.
        self.assertIsInstance(spec, TaskSpec)
        self.assertRaises(NotImplementedError,
                          spec.__class__.deserialize, None, None, None)
        return
    target_spec = WorkflowSpec()
    rebuilt = spec.__class__.deserialize(serializer, target_spec,
                                         serialized)
    # Serializing original and reconstruction must agree exactly.
    before = spec.serialize(serializer)
    after = rebuilt.serialize(serializer)
    self.assertEqual(before, after,
                     'Before:\n%s\nAfter:\n%s\n' % (before, after))
def setUp(self):
    """Give each test a fresh WorkflowSpec and a spec instance built by
    the subclass's create_instance()."""
    self.wf_spec = WorkflowSpec()
    self.spec = self.create_instance()
def testConstructor(self):
    """The constructor must store the given name on the spec."""
    self.assertEqual('my spec', WorkflowSpec('my spec').name)
def setUp(self):
    """Give each test a fresh, empty WorkflowSpec."""
    self.wf_spec = WorkflowSpec()
def testSerializationWithoutKwargs(self):
    """Round-trip Celery specs through DictionarySerializer and check
    that kwargs survive (or are correctly routed to TaskSpec defines)."""
    new_wf_spec = WorkflowSpec()
    serializer = DictionarySerializer()

    # No extra kwargs at all: kwargs dicts must match after round-trip.
    nokw = Celery(self.wf_spec, 'testnokw', 'call.name',
                  call_args=[Attrib('the_attribute'), 1])
    data = nokw.serialize(serializer)
    nokw2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertDictEqual(nokw.kwargs, nokw2.kwargs)

    # A plain dict-valued kwarg must also survive the round-trip.
    kw = Celery(self.wf_spec, 'testkw', 'call.name',
                call_args=[Attrib('the_attribute'), 1],
                some_arg={"key": "value"})
    data = kw.serialize(serializer)
    kw2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertDictEqual(kw.kwargs, kw2.kwargs)

    # Has kwargs, but they belong to TaskSpec
    kw_defined = Celery(self.wf_spec, 'testkwdef', 'call.name',
                        call_args=[Attrib('the_attribute'), 1],
                        some_ref=Attrib('value'),
                        defines={"key": "value"})
    data = kw_defined.serialize(serializer)
    kw_defined2 = Celery.deserialize(serializer, new_wf_spec, data)
    # An Attrib-valued kwarg must come back as an Attrib, not a string.
    self.assertIsInstance(kw_defined2.kwargs['some_ref'], Attrib)

    # Comes from live data. Bug not identified, but there we are...
    data = {u'inputs': [u'Wait:1'], u'lookahead': 2, u'description': u'',
            u'outputs': [],
            u'args': [[u'Attrib', u'ip'],
                      [u'spiff:value',
                       u'dc455016e2e04a469c01a866f11c0854']],
            u'manual': False,
            u'properties': {u'R': u'1'},
            u'locks': [], u'pre_assign': [],
            u'call': u'call.x',
            u'internal': False, u'post_assign': [], u'id': 8,
            u'result_key': None,
            u'defines': {u'R': u'1'},
            u'class': u'SpiffWorkflow.specs.Celery.Celery',
            u'name': u'RS1:1'}
    # Must deserialize without raising.
    Celery.deserialize(serializer, new_wf_spec, data)
def test_Merge_data_merging(self):
    """Test that Merge task actually merges data"""
    wf_spec = WorkflowSpec()
    first = Simple(wf_spec, 'first')
    second = Simple(wf_spec, 'second')
    third = Simple(wf_spec, 'third')
    bump = Simple(wf_spec, 'bump')
    fourth = Simple(wf_spec, 'fourth')
    merge1 = Merge(wf_spec, 'merge 1')
    simple1 = Simple(wf_spec, 'simple 1')
    merge2 = Merge(wf_spec, 'merge 2')
    simple2 = Simple(wf_spec, 'simple 2')
    unmerged = Simple(wf_spec, 'unmerged')

    # Four parallel branches out of Start; 'bump' adds one extra level
    # of depth before 'fourth'.
    wf_spec.start.connect(first)
    wf_spec.start.connect(second)
    wf_spec.start.connect(third)
    wf_spec.start.connect(bump)
    bump.connect(fourth)

    # Test join at different depths in tree
    first.connect(merge1)
    second.connect(merge1)
    second.connect(unmerged)

    first.connect(merge2)
    second.connect(merge2)
    third.connect(merge2)
    fourth.connect(merge2)

    merge1.connect(simple1)
    merge2.connect(simple2)

    workflow = Workflow(wf_spec)
    workflow.task_tree.set_attribute(everywhere=1)
    # Tag each task with its own name so merged attribute sets reveal
    # which branches contributed.
    for task in workflow.get_tasks():
        task.set_attribute(**{'name': task.get_name(),
                              task.get_name(): 1})
    workflow.complete_all()
    self.assertTrue(workflow.is_completed())

    found = {}
    for task in workflow.get_tasks():
        if task.task_spec is simple1:
            # merge1 sees only the 'first' and 'second' branches.
            self.assertIn('first', task.attributes)
            self.assertIn('second', task.attributes)
            self.assertDictEqual(task.attributes,
                                 {'Start': 1, 'merge 1': 1,
                                  'name': 'Start', 'simple 1': 1,
                                  'second': 1, 'first': 1})
            found['simple1'] = task
        if task.task_spec is simple2:
            # merge2 sees all four branches, including the deeper
            # bump->fourth path.
            self.assertIn('first', task.attributes)
            self.assertIn('second', task.attributes)
            self.assertIn('third', task.attributes)
            self.assertIn('fourth', task.attributes)
            self.assertDictEqual(task.attributes,
                                 {'merge 2': 1, 'simple 2': 1,
                                  'name': 'Start', 'third': 1,
                                  'bump': 1, 'Start': 1, 'second': 1,
                                  'first': 1, 'fourth': 1})
            found['simple2'] = task
        if task.task_spec is unmerged:
            # Unmerged branch keeps only its own lineage's attributes.
            self.assertDictEqual(task.attributes,
                                 {'Start': 1, 'second': 1,
                                  'name': 'Start', 'unmerged': 1})
            found['unmerged'] = task
    self.assertIn('simple1', found)
    self.assertIn('simple2', found)
    self.assertIn('unmerged', found)