Example #1
0
    def testSerializationWithoutKwargs(self):
        """Round-trip Celery task specs through the dictionary serializer.

        Exercises three spec variants (no kwargs, a plain kwarg, kwargs
        that belong to TaskSpec) plus a raw dict captured from live data
        that once triggered an unidentified deserialization bug.
        """
        target_spec = WorkflowSpec()
        serializer = DictionarySerializer()

        # Spec with no extra keyword arguments.
        plain = Celery(self.wf_spec, 'testnokw', 'call.name',
                       call_args=[Attrib('the_attribute'), 1])
        plain_copy = Celery.deserialize(serializer, target_spec,
                                        plain.serialize(serializer))
        self.assertDictEqual(plain.kwargs, plain_copy.kwargs)

        # Spec with one ordinary keyword argument.
        with_kw = Celery(self.wf_spec, 'testkw', 'call.name',
                         call_args=[Attrib('the_attribute'), 1],
                         some_arg={"key": "value"})
        with_kw_copy = Celery.deserialize(serializer, target_spec,
                                          with_kw.serialize(serializer))
        self.assertDictEqual(with_kw.kwargs, with_kw_copy.kwargs)

        # Has kwargs, but they belong to TaskSpec.
        defined = Celery(self.wf_spec, 'testkwdef', 'call.name',
                         call_args=[Attrib('the_attribute'), 1],
                         some_ref=Attrib('value'),
                         defines={"key": "value"})
        defined_copy = Celery.deserialize(serializer, target_spec,
                                          defined.serialize(serializer))
        self.assertIsInstance(defined_copy.kwargs['some_ref'], Attrib)

        # Args/data as they appear on the wire: pickled then base64-encoded.
        pickled_args = [
            b64encode(pickle.dumps(value))
            for value in (Attrib('the_attribute'), 'ip',
                          'dc455016e2e04a469c01a866f11c0854')
        ]
        task_data = {'R': b64encode(pickle.dumps('1'))}

        # Comes from live data. Bug not identified, but there we are...
        live_data = {
            'inputs': ['Wait:1'],
            'lookahead': 2,
            'description': '',
            'outputs': [],
            'args': pickled_args,
            'manual': False,
            'data': task_data,
            'locks': [],
            'pre_assign': [],
            'call': 'call.x',
            'internal': False,
            'post_assign': [],
            'id': 8,
            'result_key': None,
            'defines': task_data,
            'class': 'SpiffWorkflow.specs.Celery.Celery',
            'name': 'RS1:1'
        }
        Celery.deserialize(serializer, target_spec, live_data)
Example #2
0
    def __init__(self):
        """Build an example workflow: a multi-instance split into two
        AnsibleRun tasks, joined back together, then routed through a
        multi-choice on the 'Result' attribute to one of two end tasks.
        """
        WorkflowSpec.__init__(self)

        # MultiInstance splits the current task; the last argument (1) is
        # the number of task instances to create.
        multi_inst = MultiInstance(self, 'ansible_exec', 1)
        self.start.connect(multi_inst)

        # AnsibleRun is a task spec: it takes the owning workflow spec,
        # a task-spec name, and its argument.
        ansible_run = AnsibleRun(self, 'Ping', 'yes')
        ansible_execute = AnsibleRun(self, 'Shell', "no")

        # Register both tasks as outputs of the multi-instance split.
        multi_inst.connect(ansible_run)
        multi_inst.connect(ansible_execute)

        # Join synchronizes the branches split above.  With MultiInstance,
        # a Join works across all instances; with ThreadSplit it ignores
        # instances from other threads.
        synch_1 = Join(self, 'synch_1')
        ansible_run.connect(synch_1)
        ansible_execute.connect(synch_1)

        # Simple implements a task with one or more inputs and any number
        # of outputs.  Multiple connected inputs perform an implicit
        # multi-merge; multiple outputs an implicit parallel split.
        end = Simple(self, 'End')
        end2 = Simple(self, 'End2')

        # MultiChoice models an if-condition where several conditions may
        # match at once, creating multiple outgoing branches.
        multichoice = MultiChoice(self, 'multi_choice_1')

        synch_1.connect(multichoice)
        cond = Equal(Attrib('Result'), 'yes')
        multichoice.connect_if(cond, end)
        cond = Equal(Attrib('Result'), 'no')
        multichoice.connect_if(cond, end2)
    def __init__(self):
        """Build the nuclear-strike approval workflow.

        The general is asked first; on confirmation the president is
        asked; only a doubly-confirmed request reaches the strike task.
        Either decision-maker's default path cancels the workflow.
        """
        WorkflowSpec.__init__(self)

        # Step one: the general must confirm the strike.  ExclusiveChoice
        # follows exactly one outgoing branch.
        general = ExclusiveChoice(self, 'general')
        self.start.connect(general)

        # The general's default branch aborts the whole workflow.
        abort = Cancel(self, 'workflow_aborted')
        general.connect(abort)

        # If the general confirms, escalate to the president.
        president = ExclusiveChoice(self, 'president')
        confirmed = Equal(Attrib('confirmation'), 'yes')
        general.connect_if(confirmed, president)

        # The president's default branch is also to abort.
        president.connect(abort)

        # A confirmed president triggers the actual strike task.
        strike = Simple(self, 'nuclear_strike')
        president.connect_if(confirmed, strike)

        # Fire our Python callback when the 'nuclear_strike' task completes.
        strike.completed_event.connect(my_nuclear_strike)
Example #4
0
    def setup_workflow(self, structured=True, threshold=None, cancel=False):
        """Assemble a split/choice/join test workflow and return it.

        :param structured: when True, the Join is tied to the split task.
        :param threshold: forwarded to the Join spec.
        :param cancel: forwarded to the Join spec.
        :returns: a Workflow instance built from the new spec.
        """
        spec = WorkflowSpec()
        split_task = Simple(spec, 'split')
        spec.start.connect(split_task)

        # A structured join knows which split it belongs to.
        join_kwargs = {'threshold': threshold, 'cancel': cancel}
        if structured:
            join_kwargs['split_task'] = split_task.name
        join = Join(spec, 'join', **join_kwargs)

        first = Simple(spec, 'first', manual=True)
        default = Simple(spec, 'default')
        choice = ExclusiveChoice(spec, 'choice', manual=True)
        last = Simple(spec, 'end')

        # One branch always reaches the join; the other only when the
        # 'should_join' attribute is True, otherwise it takes the default.
        first.connect(join)
        choice.connect_if(Equal(Attrib('should_join'), True), join)
        choice.connect(default)

        split_task.connect(first)
        split_task.connect(choice)
        join.connect(last)

        return Workflow(spec)
Example #5
0
 def create_instance(self):
     """Return a Celery task spec preloaded with representative
     positional, attribute, and keyword arguments for the tests."""
     spec = Celery(self.wf_spec,
                   'testtask',
                   'call.name',
                   call_args=[Attrib('the_attribute'), 1],
                   description='foo',
                   named_kw=[],
                   dict_kw={})
     return spec
Example #6
0
def object_hook(dct):
    """JSON ``object_hook`` that revives custom-encoded objects.

    Recognizes the marker keys '__uuid__', '__bytes__', and '__attrib__'
    (checked in that order) and rebuilds the corresponding object; any
    other dict is returned unchanged.
    """
    if '__uuid__' in dct:
        return uuid.UUID(dct['__uuid__'])
    elif '__bytes__' in dct:
        # Bytes were serialized as an ASCII string; re-encode them.
        return dct['__bytes__'].encode('ascii')
    elif '__attrib__' in dct:
        return Attrib(dct['__attrib__'])
    else:
        return dct
Example #7
0
 def __init__(self):
     """Two-stage approval workflow: peopleA, then peopleB, must both
     confirm; either one's default path cancels the workflow."""
     WorkflowSpec.__init__(self)

     # First approver; ExclusiveChoice takes exactly one branch.
     approver_a = ExclusiveChoice(self, 'peopleA')
     self.start.connect(approver_a)

     # Shared abort path: both approvers default to it.
     abort = Cancel(self, 'workflow_aborted')
     approver_a.connect(abort)

     # On confirmation, escalate to the second approver.
     approver_b = ExclusiveChoice(self, 'peopleB')
     confirmed = Equal(Attrib('confirmation'), 'yes')
     approver_a.connect_if(confirmed, approver_b)
     approver_b.connect(abort)

     # A doubly-confirmed request runs the strike task, whose completion
     # event triggers our Python callback.
     strike = Simple(self, 'nuclear_strike')
     approver_b.connect_if(confirmed, strike)
     strike.completed_event.connect(my_nuclear_strike)
Example #8
0
    def testSerializationWithoutKwargs(self):
        """Round-trip Celery task specs through the dictionary serializer.

        Exercises three spec variants (no kwargs, a plain kwarg, kwargs
        that belong to TaskSpec) plus a raw dict captured from live data
        that once triggered an unidentified deserialization bug.
        """
        target_spec = WorkflowSpec()
        serializer = DictionarySerializer()

        # Spec with no extra keyword arguments.
        plain = Celery(self.wf_spec, 'testnokw', 'call.name',
                       call_args=[Attrib('the_attribute'), 1])
        plain_copy = Celery.deserialize(serializer, target_spec,
                                        plain.serialize(serializer))
        self.assertDictEqual(plain.kwargs, plain_copy.kwargs)

        # Spec with one ordinary keyword argument.
        with_kw = Celery(self.wf_spec, 'testkw', 'call.name',
                         call_args=[Attrib('the_attribute'), 1],
                         some_arg={"key": "value"})
        with_kw_copy = Celery.deserialize(serializer, target_spec,
                                          with_kw.serialize(serializer))
        self.assertDictEqual(with_kw.kwargs, with_kw_copy.kwargs)

        # Has kwargs, but they belong to TaskSpec.
        defined = Celery(self.wf_spec, 'testkwdef', 'call.name',
                         call_args=[Attrib('the_attribute'), 1],
                         some_ref=Attrib('value'),
                         defines={"key": "value"})
        defined_copy = Celery.deserialize(serializer, target_spec,
                                          defined.serialize(serializer))
        self.assertIsInstance(defined_copy.kwargs['some_ref'], Attrib)

        # Comes from live data. Bug not identified, but there we are...
        live_data = {
            u'inputs': [u'Wait:1'],
            u'lookahead': 2,
            u'description': u'',
            u'outputs': [],
            u'args': [[u'Attrib', u'ip'],
                      [u'spiff:value', u'dc455016e2e04a469c01a866f11c0854']],
            u'manual': False,
            u'properties': {u'R': u'1'},
            u'locks': [],
            u'pre_assign': [],
            u'call': u'call.x',
            u'internal': False,
            u'post_assign': [],
            u'id': 8,
            u'result_key': None,
            u'defines': {u'R': u'1'},
            u'class': u'SpiffWorkflow.specs.Celery.Celery',
            u'name': u'RS1:1'
        }
        Celery.deserialize(serializer, target_spec, live_data)