Exemplo n.º 1
0
 def buildNodelist(self):
     """Create (or reuse) a task spec for every node in self.nodeList.

     Each entry of self.nodeList is a dict with at least 'id', 'name' and
     'type'.  Every spec is stored in self.task_list keyed by the node id
     (as a string), with the id also recorded as the spec's description.

     Returns:
         The task spec of the last node processed (None for an empty list).
     """
     task = None
     for node in self.nodeList:
         nodeId = str(node['id'])
         nodeName = node['name']
         nodeType = node['type']
         try:
             # Reuse a spec that is already registered under this name.
             task = self.get_task_spec_from_name(nodeName)
         except Exception:
             # Narrowed from a bare `except:` so SystemExit and
             # KeyboardInterrupt are no longer swallowed.
             if nodeType == 'StartTask':
                 # The workflow's implicit start spec stands in for this node.
                 self.start.description = nodeId
                 task = self.start
             elif nodeType == 'MultiInstance':
                 task = MultiInstance(self, nodeName, 1, description=nodeId)
             elif nodeType == 'playbook':
                 task = taskSpec(self, nodeName, description=nodeId,
                                 node_type=nodeType,
                                 file_path=node['file_path'])
             elif nodeType == 'join':
                 task = Join(self, nodeName, description=nodeId)
             else:
                 # 'end' and any unrecognised type both map to a Simple spec
                 # (the original had two identical branches here).
                 task = Simple(self, nodeName, description=nodeId)
         finally:
             print(nodeId, nodeName)
             self.task_list[nodeId] = task
     return task
Exemplo n.º 2
0
    def setup_workflow(self, structured=True, threshold=None, cancel=False):
        """Assemble the split/join test workflow and return a Workflow.

        With structured=True the Join is bound to the 'split' task via
        split_task; threshold and cancel are forwarded to the Join as-is.
        """
        spec = WorkflowSpec()
        splitter = Simple(spec, 'split')
        spec.start.connect(splitter)

        # Only the structured variant ties the join to its split task.
        join_args = {'threshold': threshold, 'cancel': cancel}
        if structured:
            join_args['split_task'] = splitter.name
        join = Join(spec, 'join', **join_args)

        single = Simple(spec, 'first', manual=True)
        fallback = Simple(spec, 'default')
        chooser = ExclusiveChoice(spec, 'choice', manual=True)
        finish = Simple(spec, 'end')

        single.connect(join)
        chooser.connect_if(Equal(Attrib('should_join'), True), join)
        chooser.connect(fallback)

        splitter.connect(single)
        splitter.connect(chooser)
        join.connect(finish)

        return Workflow(spec)
Exemplo n.º 3
0
    def test_ancestors_cyclic(self):
        """ancestors() must terminate when specs are wired back onto each
        other (T2 follows T1 while T1 also connects to T2).
        """
        T1 = Join(self.wf_spec, 'T1')
        T2 = Simple(self.wf_spec, 'T2')

        T1.follow(self.wf_spec.start)
        T2.follow(T1)
        T1.connect(T2)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(T1.ancestors(), [self.wf_spec.start])
        self.assertEqual(T2.ancestors(), [T1, self.wf_spec.start])
Exemplo n.º 4
0
    def test_ancestors_cyclic(self):
        """ancestors() must terminate when specs are wired back onto each
        other (T2 follows T1 while T1 also connects to T2).
        """
        T1 = Join(self.wf_spec, 'T1')
        T2 = Simple(self.wf_spec, 'T2')

        T1.follow(self.wf_spec.start)
        T2.follow(T1)
        T1.connect(T2)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(T1.ancestors(), [self.wf_spec.start])
        self.assertEqual(T2.ancestors(), [T1, self.wf_spec.start])
Exemplo n.º 5
0
    def testTree(self):
        """Build a nine-task tree by hand and verify both get_dump()
        output and the state-filtered Task.Iterator.
        """
        # Build a tree.
        spec = WorkflowSpec()
        workflow = MockWorkflow()
        task1 = Simple(spec, 'Simple 1')
        task2 = Simple(spec, 'Simple 2')
        task3 = Simple(spec, 'Simple 3')
        task4 = Simple(spec, 'Simple 4')
        task5 = Simple(spec, 'Simple 5')
        task6 = Simple(spec, 'Simple 6')
        task7 = Simple(spec, 'Simple 7')
        task8 = Simple(spec, 'Simple 8')
        task9 = Simple(spec, 'Simple 9')
        # Children are attached both ways: via _add_child() and via the
        # Task(workflow, spec, parent) constructor.
        root = Task(workflow, task1)
        c1 = root._add_child(task2)
        c11 = c1._add_child(task3)
        c111 = c11._add_child(task4)
        c1111 = Task(workflow, task5, c111)
        c112 = Task(workflow, task6, c11)
        c12 = Task(workflow, task7, c1)
        c2 = Task(workflow, task8, root)
        c3 = Task(workflow, task9, root)
        # One COMPLETED leaf so the MAYBE-filtered iterator has a task to skip.
        c3.state = Task.COMPLETED

        # Check whether the tree is built properly.
        expected = """!/0: Task of Simple 1 State: MAYBE Children: 3
  !/0: Task of Simple 2 State: MAYBE Children: 2
    !/0: Task of Simple 3 State: MAYBE Children: 2
      !/0: Task of Simple 4 State: MAYBE Children: 1
        !/0: Task of Simple 5 State: MAYBE Children: 0
      !/0: Task of Simple 6 State: MAYBE Children: 0
    !/0: Task of Simple 7 State: MAYBE Children: 0
  !/0: Task of Simple 8 State: MAYBE Children: 0
  !/0: Task of Simple 9 State: COMPLETED Children: 0"""
        # '!' is a placeholder for the generated task id; swap it for a
        # hex/dash regex so the dump matches regardless of actual ids.
        expected = re.compile(expected.replace('!', r'([0-9a-f\-]+)'))
        self.assertTrue(
            expected.match(root.get_dump()),
            'Expected:\n' + repr(expected.pattern) + '\n' + 'but got:\n' +
            repr(root.get_dump()))

        # Now remove one line from the expected output for testing the
        # filtered iterator.
        expected2 = ''
        for line in expected.pattern.split('\n'):
            if line.find('Simple 9') >= 0:
                continue
            expected2 += line.lstrip() + '\n'
        expected2 = re.compile(expected2)

        # Run the iterator test.
        result = ''
        for thetask in Task.Iterator(root, Task.MAYBE):
            result += thetask.get_dump(0, False) + '\n'
        self.assertTrue(
            expected2.match(result), 'Expected:\n' + repr(expected2.pattern) +
            '\n' + 'but got:\n' + repr(result))
Exemplo n.º 6
0
    def __init__(self):
        """Workflow spec: MultiInstance 'ansible_exec' split -> two
        AnsibleRun tasks -> Join 'synch_1' -> MultiChoice routing on the
        'Result' attribute to 'End' or 'End2'.
        """
        WorkflowSpec.__init__(self)

        # (Leftover comment from the nuclear-strike example removed.)

        #ansible_run = AnsibleRun(self, 'Ping','hostname')
        #self.start.connect(ansible_run)
        #ansible_execute = AnsibleRun(self, 'Shell', ["ping", "-t", "1", "127.0.0.1"])
        #ansible_run.connect(ansible_execute)

        # NOTE(review): 'data' is assigned but never used in this spec.
        data = {'post_assign': {'name': 'Test', 'value': 'TestValues'}}
        # MultiInstance splits the current task; the argument 1 is the
        # number of task instances to create.
        multi_inst = MultiInstance(self, 'ansible_exec', 1)

        self.start.connect(multi_inst)

        # AnsibleRun is a task spec: it references this workflow spec and
        # is given a task-spec name.
        ansible_run = AnsibleRun(self, 'Ping', 'yes')
        ansible_execute = AnsibleRun(self, 'Shell', "no")

        # TaskSpec: add the given tasks as outputs of the MultiInstance.
        multi_inst.connect(ansible_run)
        multi_inst.connect(ansible_execute)

        # Join re-synchronizes the branches split earlier.  With
        # MultiInstance the join works across all instances; with
        # ThreadSplit it would ignore instances from other threads.
        synch_1 = Join(self, 'synch_1')
        #self.start.connect(synch_1)

        ansible_run.connect(synch_1)
        ansible_execute.connect(synch_1)

        #gate_test = Gate(self,'gate1','synch_1')
        #synch_1.connect(gate_test)

        # Simple implements a task with one or more inputs and any number
        # of outputs: several inputs -> implicit multi-merge; several
        # outputs -> implicit parallel split.
        end = Simple(self, 'End')
        end2 = Simple(self, 'End2')

        # MultiChoice models an if-condition where several conditions may
        # match at once, creating multiple outgoing branches.  It has one
        # or more inputs and one or more outgoing branches.
        multichoice = MultiChoice(self, 'multi_choice_1')

        synch_1.connect(multichoice)
        cond = Equal(Attrib('Result'), 'yes')
        multichoice.connect_if(cond, end)
        cond = Equal(Attrib('Result'), 'no')
        multichoice.connect_if(cond, end2)
Exemplo n.º 7
0
    def testTree(self):
        # Build a tree.
        spec = WorkflowSpec()
        workflow = MockWorkflow()
        task1 = Simple(spec, 'Simple 1')
        task2 = Simple(spec, 'Simple 2')
        task3 = Simple(spec, 'Simple 3')
        task4 = Simple(spec, 'Simple 4')
        task5 = Simple(spec, 'Simple 5')
        task6 = Simple(spec, 'Simple 6')
        task7 = Simple(spec, 'Simple 7')
        task8 = Simple(spec, 'Simple 8')
        task9 = Simple(spec, 'Simple 9')
        root = Task(workflow, task1)
        c1 = root._add_child(task2)
        c11 = c1._add_child(task3)
        c111 = c11._add_child(task4)
        c1111 = Task(workflow, task5, c111)
        c112 = Task(workflow, task6, c11)
        c12 = Task(workflow, task7, c1)
        c2 = Task(workflow, task8, root)
        c3 = Task(workflow, task9, root)
        c3.state = Task.COMPLETED

        # Check whether the tree is built properly.
        expected = """1/0: Task of Simple 1 State: FUTURE Children: 3
  2/0: Task of Simple 2 State: FUTURE Children: 2
    3/0: Task of Simple 3 State: FUTURE Children: 2
      4/0: Task of Simple 4 State: FUTURE Children: 1
        5/0: Task of Simple 5 State: FUTURE Children: 0
      6/0: Task of Simple 6 State: FUTURE Children: 0
    7/0: Task of Simple 7 State: FUTURE Children: 0
  8/0: Task of Simple 8 State: FUTURE Children: 0
  9/0: Task of Simple 9 State: COMPLETED Children: 0"""
        self.assert_(expected == root.get_dump(),
                     'Expected:\n' + repr(expected) + '\n' + \
                     'but got:\n'  + repr(root.get_dump()))

        # Now remove one line from the expected output for testing the
        # filtered iterator.
        expected2 = ''
        for line in expected.split('\n'):
            if line.find('Simple 9') >= 0:
                continue
            expected2 += line.lstrip() + '\n'

        # Run the iterator test.
        result = ''
        for thetask in Task.Iterator(root, Task.FUTURE):
            result += thetask.get_dump(0, False) + '\n'
        self.assert_(expected2 == result,
                     'Expected:\n' + expected2 + '\n' + \
                     'but got:\n'  + result)
Exemplo n.º 8
0
    def __init__(self):
        """Nuclear-strike approval workflow: the general and then the
        president must each confirm, otherwise the workflow is cancelled.
        """
        WorkflowSpec.__init__(self)

        # Step 1: the general reviews the strike order.
        general = ExclusiveChoice(self, 'general')
        self.start.connect(general)

        # Default outcome for the general: abort the workflow.
        abort = Cancel(self, 'workflow_aborted')
        general.connect(abort)

        # On confirmation, escalate to the president.
        president = ExclusiveChoice(self, 'president')
        confirmed = Equal(Attrib('confirmation'), 'yes')
        general.connect_if(confirmed, president)

        # Default outcome for the president: abort as well.
        president.connect(abort)

        # Both confirmed: carry out the strike.
        launch = Simple(self, 'nuclear_strike')
        president.connect_if(confirmed, launch)

        # Hook the Python callback fired when 'nuclear_strike' completes.
        launch.completed_event.connect(my_nuclear_strike)
Exemplo n.º 9
0
    def testPattern(self):
        """Chain two Transform specs after start and verify the attribute
        values they compute show up on the executed tasks.
        """
        setter = Transform(self.wf_spec,
                           'First',
                           transforms=["my_task.set_attribute(foo=1)"])
        self.wf_spec.start.connect(setter)
        incrementer = Transform(
            self.wf_spec,
            'Second',
            transforms=[
                "my_task.set_attribute(foo=my_task.attributes['foo']+1)",
                "my_task.set_attribute(copy=my_task.attributes['foo'])",
            ])
        setter.connect(incrementer)
        tail = Simple(self.wf_spec, 'Last')
        incrementer.connect(tail)

        workflow = run_workflow(self, self.wf_spec,
                                'Start\n  First\n    Second\n      Last\n', '')
        # Indices 3 and 5 address the 'First' and 'Last' tasks in the
        # executed tree (per the original test's naming).
        self.assertEqual(workflow.get_task(3).attributes.get('foo'), 1)
        self.assertEqual(workflow.get_task(5).attributes.get('copy'), 2)
Exemplo n.º 10
0
    def testWaitOnInputs(self):
        """
        Tests that we can make the transform wait on its inputs and then
        continue.
        """
        # Any task that writes 'foo' as an output
        taskA1 = Transform(self.wf_spec,
                           'A1',
                           transforms=["my_task.set_attribute(foo=1)"])
        self.wf_spec.start.connect(taskA1)
        # Any task that writes 'bar' as an output after a wait:
        # its transform returns False until the counter reaches 4, so the
        # workflow must be re-run -- hence max_tries=3 below.
        taskA2 = Transform(self.wf_spec,
                           'A2',
                           transforms=[
                               "self.bar = getattr(self, 'bar', 0) + 1",
                               "my_task.set_attribute(bar=self.bar)",
                               "return self.bar == 4",
                           ])
        self.wf_spec.start.connect(taskA2)

        # The merge logic lives in SomeClass.my_code, referenced by its
        # dotted import path.
        function_name = "tests.SpiffWorkflow.specs.SafeTransMergeTest." \
                        "SomeClass.my_code"
        taskB = SafeTransMerge(self.wf_spec, 'B', function_name=function_name)
        taskB.follow(taskA1)
        taskB.follow(taskA2)
        task3 = Simple(self.wf_spec, 'Last')
        taskB.connect(task3)

        expected = 'Start\n  A1\n  A2\n    B\n      Last\n'
        workflow = run_workflow(self, self.wf_spec, expected, '', max_tries=3)
        last = [t for t in workflow.get_tasks() if t.get_name() == 'Last'][0]
        # Presumably SomeClass.my_code copies each collected field under a
        # 'my_' prefix as well -- not visible here; confirm against it.
        self.assertEqual(last.attributes.get('my_foo'), 1)
        self.assertEqual(last.attributes.get('my_bar'), 4)
        self.assertEqual(last.attributes.get('foo'), 1)
        self.assertEqual(last.attributes.get('bar'), 4)
def test_3_retry():
    """Attach a retry-resolution ErrorHandler to a1 and run the workflow."""
    spec, a1 = create_worlflow()

    # Error handler configured to retry, with one compensator following it.
    handler = ErrorHandler(spec, resolution='retry')
    compensator = Simple(spec, 'compensator #1')
    compensator.follow(handler)
    a1.connect_error_handler(handler)

    # The normal continuation after a1.
    follow_up = Simple(spec, 'Next task post error')
    follow_up.follow(a1)

    run_workflow(spec)
Exemplo n.º 12
0
 def __init__(self):
     """Two-stage approval: peopleA then peopleB must both answer 'yes',
     otherwise the workflow is cancelled.
     """
     WorkflowSpec.__init__(self)

     # First approver; default path cancels the workflow.
     approver_a = ExclusiveChoice(self, 'peopleA')
     self.start.connect(approver_a)
     abort = Cancel(self, 'workflow_aborted')
     approver_a.connect(abort)

     # Escalate to the second approver on confirmation.
     approver_b = ExclusiveChoice(self, 'peopleB')
     confirmed = Equal(Attrib('confirmation'), 'yes')
     approver_a.connect_if(confirmed, approver_b)
     approver_b.connect(abort)

     # Both confirmed: run the strike and hook the completion callback.
     launch = Simple(self, 'nuclear_strike')
     approver_b.connect_if(confirmed, launch)
     launch.completed_event.connect(my_nuclear_strike)
Exemplo n.º 13
0
    def testWaitOnInputs(self):
        """
        Tests that we can make the transform wait on its inputs and then
        continue.
        """
        # Any task that writes 'foo' as an output
        taskA1 = Transform(self.wf_spec,
                           'A1',
                           transforms=["my_task.set_attribute(foo=1)"])
        self.wf_spec.start.connect(taskA1)
        # Any task that writes 'bar' as an output after a wait:
        # its transform returns False until the counter reaches 4, so the
        # workflow must be re-run -- hence max_tries=3 below.
        taskA2 = Transform(self.wf_spec,
                           'A2',
                           transforms=[
                               "self.bar = getattr(self, 'bar', 0) + 1",
                               "my_task.set_attribute(bar=self.bar)",
                               "return self.bar == 4",
                           ])
        self.wf_spec.start.connect(taskA2)

        # Create our special task that collects outputs and proceeds when it
        # has all the fields it needs, writing them into the output task.
        # Each collected field is stored both under its own name and under
        # a 'my_' prefix; the merge returns False until every field arrived.
        my_code = """
            fields = ['foo', 'bar']
            # Get fields from task and store them in spec
            for key in fields:
                if key in my_task.attributes:
                    merge_dictionary(self.properties, dict(collect={
                            'my_%s' % key: my_task.attributes[key],
                            key: my_task.attributes[key]}))
            # Check if we have all fields
            collected = self.get_property('collect', {})
            if any(key for key in fields if key not in collected):
                return False
            my_task.set_attribute(**collected)
            return True"""

        taskB = TransMerge(self.wf_spec, 'B', transforms=[my_code])
        taskB.follow(taskA1)
        taskB.follow(taskA2)
        task3 = Simple(self.wf_spec, 'Last')
        taskB.connect(task3)

        expected = 'Start\n  A1\n  A2\n    B\n      Last\n'
        workflow = run_workflow(self, self.wf_spec, expected, '', max_tries=3)
        last = [t for t in workflow.get_tasks() if t.get_name() == 'Last'][0]
        self.assertEqual(last.attributes.get('my_foo'), 1)
        self.assertEqual(last.attributes.get('my_bar'), 4)
        self.assertEqual(last.attributes.get('foo'), 1)
        self.assertEqual(last.attributes.get('bar'), 4)
def test_1_fail():
    """Default-resolution ErrorHandler attached to a1; one compensator."""
    spec, a1 = create_worlflow()

    # Normal continuation after a1.
    Simple(spec, 'Next task post error').follow(a1)

    # Error handler with the default resolution and a single compensator.
    handler = ErrorHandler(spec)
    Simple(spec, 'comp 1').follow(handler)
    a1.connect_error_handler(handler)

    run_workflow(spec)
Exemplo n.º 15
0
def test_3_retry():
    """ErrorHandler with resolution='retry' on a1; the workflow must run."""
    spec, a1 = create_worlflow()

    retry_handler = ErrorHandler(spec, resolution='retry')
    # One compensator runs off the handler before a1 is retried.
    Simple(spec, 'compensator #1').follow(retry_handler)
    a1.connect_error_handler(retry_handler)

    # Normal continuation after a1.
    Simple(spec, 'Next task post error').follow(a1)

    run_workflow(spec)
Exemplo n.º 16
0
def test_1_fail():
    """ErrorHandler with the default resolution attached to a1."""
    spec, a1 = create_worlflow()

    # Continuation task that follows a1 on the normal path.
    next_task = Simple(spec, 'Next task post error')
    next_task.follow(a1)

    # Handler plus one compensating task.
    handler = ErrorHandler(spec)
    compensator = Simple(spec, 'comp 1')
    compensator.follow(handler)
    a1.connect_error_handler(handler)

    run_workflow(spec)
Exemplo n.º 17
0
 def buildFLows(self, beforeId, afterId):
     """Resolve (or create) the task spec for the node *afterId*.

     Both ids are looked up in self.nodeIdList to find the matching node
     dicts in self.nodeList.  The 'after' node's spec is reused when one
     is already registered, otherwise created from the node's 'type'.

     Returns:
         (task, afterNodeName, beforeNodeName); task is None when a new
         spec was needed but the node type matched no known branch.

     Raises:
         ValueError: if either id is absent from self.nodeIdList.
     """
     task = None
     # Positions of both nodes inside nodeIdList.
     beforenode_index = self.nodeIdList.index(beforeId)
     afternode_index = self.nodeIdList.index(afterId)
     # Node data dicts corresponding to those positions.
     beforeNodeInfo = self.nodeList[beforenode_index]
     afterNodeInfo = self.nodeList[afternode_index]
     beforeNodeName = beforeNodeInfo['name']
     afterNodeName = afterNodeInfo['name']
     try:
         task = self.get_task_spec_from_name(afterNodeName)
     except Exception:
         # Narrowed from a bare `except:` so SystemExit and
         # KeyboardInterrupt are no longer swallowed.
         nodeType = afterNodeInfo['type']
         if nodeType == 'MultiInstance':
             task = MultiInstance(self, afterNodeName, 1)
         elif nodeType == 'task':
             task = taskSpec(self, afterNodeName)
         elif nodeType == 'join':
             task = Join(self, afterNodeName)
         elif nodeType == 'end':
             # NOTE(review): hard-coded name 'End' instead of
             # afterNodeName -- matches buildNodelist's end handling only
             # loosely; confirm this is intentional.
             task = Simple(self, 'End')
         print(afterNodeName)
     return task, afterNodeName, beforeNodeName
Exemplo n.º 18
0
    def testAncestors(self):
        """Diamond of specs (T1 -> T2A/T2B -> M -> T3): each spec's
        ancestors() must cover everything upstream of it.
        """
        T1 = Simple(self.wf_spec, 'T1')
        T2A = Simple(self.wf_spec, 'T2A')
        T2B = Simple(self.wf_spec, 'T2B')
        M = Join(self.wf_spec, 'M')
        T3 = Simple(self.wf_spec, 'T3')

        T1.follow(self.wf_spec.start)
        T2A.follow(T1)
        T2B.follow(T1)
        T2A.connect(M)
        T2B.connect(M)
        T3.follow(M)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(T1.ancestors(), [self.wf_spec.start])
        self.assertEqual(T2A.ancestors(), [T1, self.wf_spec.start])
        self.assertEqual(T2B.ancestors(), [T1, self.wf_spec.start])
        # Order of the join's ancestors is not pinned, only membership.
        M_ancestors = M.ancestors()
        self.assertIn(T1, M_ancestors)
        self.assertIn(T2A, M_ancestors)
        self.assertIn(T2B, M_ancestors)
        self.assertIn(self.wf_spec.start, M_ancestors)
        self.assertEqual(len(T3.ancestors()), 5)
Exemplo n.º 19
0
    def testAncestors(self):
        """Diamond of specs (T1 -> T2A/T2B -> M -> T3): each spec's
        ancestors() must cover everything upstream of it.
        """
        T1 = Simple(self.wf_spec, 'T1')
        T2A = Simple(self.wf_spec, 'T2A')
        T2B = Simple(self.wf_spec, 'T2B')
        M = Join(self.wf_spec, 'M')
        T3 = Simple(self.wf_spec, 'T3')

        T1.follow(self.wf_spec.start)
        T2A.follow(T1)
        T2B.follow(T1)
        T2A.connect(M)
        T2B.connect(M)
        T3.follow(M)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(T1.ancestors(), [self.wf_spec.start])
        self.assertEqual(T2A.ancestors(), [T1, self.wf_spec.start])
        self.assertEqual(T2B.ancestors(), [T1, self.wf_spec.start])
        self.assertEqual(M.ancestors(), [T2A, T1, self.wf_spec.start, T2B])
        self.assertEqual(len(T3.ancestors()), 5)
Exemplo n.º 20
0
    def testAncestors(self):
        """Diamond of specs (T1 -> T2A/T2B -> M -> T3): each spec's
        ancestors() must cover everything upstream of it.
        """
        T1 = Simple(self.wf_spec, "T1")
        T2A = Simple(self.wf_spec, "T2A")
        T2B = Simple(self.wf_spec, "T2B")
        M = Join(self.wf_spec, "M")
        T3 = Simple(self.wf_spec, "T3")

        T1.follow(self.wf_spec.start)
        T2A.follow(T1)
        T2B.follow(T1)
        T2A.connect(M)
        T2B.connect(M)
        T3.follow(M)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(T1.ancestors(), [self.wf_spec.start])
        self.assertEqual(T2A.ancestors(), [T1, self.wf_spec.start])
        self.assertEqual(T2B.ancestors(), [T1, self.wf_spec.start])
        self.assertEqual(M.ancestors(), [T2A, T1, self.wf_spec.start, T2B])
        self.assertEqual(len(T3.ancestors()), 5)
Exemplo n.º 21
0
    def test_Merge_data_merging(self):
        """Test that Merge task actually merges data"""
        wf_spec = WorkflowSpec()
        first = Simple(wf_spec, 'first')
        second = Simple(wf_spec, 'second')
        third = Simple(wf_spec, 'third')
        bump = Simple(wf_spec, 'bump')
        fourth = Simple(wf_spec, 'fourth')
        merge1 = Merge(wf_spec, 'merge 1')
        simple1 = Simple(wf_spec, 'simple 1')
        merge2 = Merge(wf_spec, 'merge 2')
        simple2 = Simple(wf_spec, 'simple 2')
        unmerged = Simple(wf_spec, 'unmerged')

        wf_spec.start.connect(first)
        wf_spec.start.connect(second)
        wf_spec.start.connect(third)
        wf_spec.start.connect(bump)
        bump.connect(fourth)  # Test join at different depths in tree

        # merge1 joins two branches; 'unmerged' hangs off 'second' only.
        first.connect(merge1)
        second.connect(merge1)
        second.connect(unmerged)

        # merge2 joins all four branches, one of them ('fourth') deeper.
        first.connect(merge2)
        second.connect(merge2)
        third.connect(merge2)
        fourth.connect(merge2)

        merge1.connect(simple1)
        merge2.connect(simple2)

        workflow = Workflow(wf_spec)
        workflow.task_tree.set_attribute(everywhere=1)
        # Tag every task with its own name so the merge result shows
        # exactly which branches contributed.
        for task in workflow.get_tasks():
            task.set_attribute(**{'name': task.get_name(), task.get_name(): 1})
        workflow.complete_all()
        self.assertTrue(workflow.is_completed())
        found = {}
        for task in workflow.get_tasks():
            if task.task_spec is simple1:
                # simple1 sits after merge1: both merged branches present.
                self.assertIn('first', task.attributes)
                self.assertIn('second', task.attributes)
                self.assertDictEqual(task.attributes, {'Start': 1,
                        'merge 1': 1, 'name': 'Start', 'simple 1': 1,
                        'second': 1, 'first': 1})
                found['simple1'] = task
            if task.task_spec is simple2:
                # simple2 sits after merge2: all four branches present.
                self.assertIn('first', task.attributes)
                self.assertIn('second', task.attributes)
                self.assertIn('third', task.attributes)
                self.assertIn('fourth', task.attributes)
                self.assertDictEqual(task.attributes, {'merge 2': 1,
                        'simple 2': 1, 'name': 'Start', 'third': 1, 'bump': 1,
                        'Start': 1, 'second': 1, 'first': 1, 'fourth': 1})
                found['simple2'] = task
            if task.task_spec is unmerged:
                # The unmerged branch must only see its own lineage.
                self.assertDictEqual(task.attributes, {'Start': 1,
                        'second': 1, 'name': 'Start', 'unmerged': 1})
                found['unmerged'] = task
        # All three terminal specs must actually have executed.
        self.assertIn('simple1', found)
        self.assertIn('simple2', found)
        self.assertIn('unmerged', found)
Exemplo n.º 22
0
    def __init__(self):
        """Workflow spec: MultiInstance 'workflow_task' split -> three
        taskSpec tasks -> Join 'synch_1' -> Simple 'End'.
        """
        WorkflowSpec.__init__(self)

        # (Leftover comment from the nuclear-strike example removed.)

        #workflow_run = taskSpec(self, 'Ping','hostname')
        #self.start.connect(workflow_run)
        #workflow_execute = taskSpec(self, 'Shell', ["ping", "-t", "1", "127.0.0.1"])
        #workflow_run.connect(workflow_execute)

        # data = {'post_assign':{'name':'Test','value':'TestValues'}}
        # MultiInstance splits the current task; the argument 1 is the
        # number of task instances to create.
        multi_inst = MultiInstance(self,'workflow_task',1)

        self.start.connect(multi_inst)


        # taskSpec is a task specification: it references this workflow
        # spec and is given the task-spec name.
        workflow_1 = taskSpec(self, 'SQL')
        workflow_2 = taskSpec(self, '脚本')
        workflow_3 = taskSpec(self, 'SQL3')


        # TaskSpec: add the given tasks as outputs of the MultiInstance.
        multi_inst.connect(workflow_1)
        multi_inst.connect(workflow_2)
        multi_inst.connect(workflow_3)

        # Join re-synchronizes the branches split earlier.  With
        # MultiInstance the join works across all instances; with
        # ThreadSplit it would ignore instances from other threads.
        synch_1 = Join(self, 'synch_1')
        #self.start.connect(synch_1)

        workflow_1.connect(synch_1)
        workflow_2.connect(synch_1)
        workflow_3.connect(synch_1)

        #gate_test = Gate(self,'gate1','synch_1')
        #synch_1.connect(gate_test)

        # Simple implements a task with one or more inputs and any number
        # of outputs: several inputs -> implicit multi-merge; several
        # outputs -> implicit parallel split.
        end = Simple(self, 'End')

        # MultiChoice would model an if-condition where several conditions
        # may match at once, creating multiple outgoing branches.
        # multichoice = MultiChoice(self, 'multi_choice_1')
        #
        synch_1.connect(end)

        #gate_test.connect(end)
        #synch_1.connect(end)
        #synch_1.connect(multi_inst)

        #end = Simple(self, 'End')
        #workflow_execute.connect(end)
        # As soon as all tasks are either "completed" or "aborted", the
        # workflow implicitly ends.


# ids = []
# for i in ids2:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids.append(each)
#
# ids2 = []
# task = None
# for i in ids:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     toNode = [x['to'] for x in afterNodeIds]
#     # num=len(set(toNode))
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         # num-=1
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids2.append(each)
#
# ids = []
# for i in ids2:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids.append(each)
#
# ids2 = []
# task = None
# for i in ids:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     toNode = [x['to'] for x in afterNodeIds]
#     # num=len(set(toNode))
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         # num-=1
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids2.append(each)
#
# ids = []
# for i in ids2:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids.append(each)
#
# ids2 = []
# task = None
# for i in ids:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     toNode = [x['to'] for x in afterNodeIds]
#     # num=len(set(toNode))
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         # num-=1
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids2.append(each)
#
# ids = []
# for i in ids2:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids.append(each)
#
# ids2 = []
# task = None
# for i in ids:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     toNode = [x['to'] for x in afterNodeIds]
#     # num=len(set(toNode))
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         # num-=1
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids2.append(each)
#
# ids = []
# for i in ids2:
#     afterNodeIds = [x for x in lineList if x['from'] == i['to']]
#     for each in afterNodeIds:
#         beforeId = each['from']
#         afterId = each['to']
#         task, afterNodeName, beforenode_index = self.buildFLows(beforeId, afterId, nodeIdList, nodeList)
#         task_list[task.name] = task
#         beforeNodeName = nodeList[beforenode_index]['name']
#         beforeNode = task_list[beforeNodeName]
#
#         if task_list[afterNodeName] not in beforeNode.outputs:
#             beforeNode.connect(task_list[afterNodeName])
#         ids.append(each)